Compare commits

...

11 Commits

Author SHA1 Message Date
Matt Hill
fe28a812a4 fix ssh, undeprecate wifi 2026-02-10 13:41:54 -07:00
Aiden McClelland
b6262c8e13 Fix PackageInfoShort to handle LocaleString on releaseNotes (#3112)
* Fix PackageInfoShort to handle LocaleString on releaseNotes

* fix: filter by target_version in get_matching_models and pass otherVersions from install

* chore: add exver documentation for ai agents
2026-02-09 19:42:03 +00:00
Matt Hill
ba740a9ee2 Multiple (#3111)
* fix alerts i18n, fix status display, better, remove usb media, hide shutdown for install complete

* trigger change detection for localize pipe and round out implementing localize pipe for consistency even though not needed
2026-02-09 12:41:29 -07:00
Aiden McClelland
f2142f0bb3 add documentation for ai agents (#3115)
* add documentation for ai agents

* docs: consolidate CLAUDE.md and CONTRIBUTING.md, add style guidelines

- Refactor CLAUDE.md to reference CONTRIBUTING.md for build/test/format info
- Expand CONTRIBUTING.md with comprehensive build targets, env vars, and testing
- Add code style guidelines section with conventional commits
- Standardize SDK prettier config to use single quotes (matching web)
- Add project-level Claude Code settings to disable co-author attribution

* style(sdk): apply prettier with single quotes

Run prettier across sdk/base and sdk/package to apply the
standardized quote style (single quotes matching web).

* docs: add USER.md for per-developer TODO filtering

- Add agents/USER.md to .gitignore (contains user identifier)
- Document session startup flow in CLAUDE.md:
  - Create USER.md if missing, prompting for identifier
  - Filter TODOs by @username tags
  - Offer relevant TODOs on session start

* docs: add i18n documentation task to agent TODOs

* docs: document i18n ID patterns in core/

Add agents/i18n-patterns.md covering rust-i18n setup, translation file
format, t!() macro usage, key naming conventions, and locale selection.
Remove completed TODO item and add reference in CLAUDE.md.

* chore: clarify that all builds work on any OS with Docker
2026-02-06 00:10:16 +01:00
gStart9
86ca23c093 Remove redundant https:// strings in start-tunnel installation output (#3114) 2026-02-05 23:22:31 +01:00
Dominion5254
463b6ca4ef propagate Error info (#3116) 2026-02-05 23:21:28 +01:00
Matt Hill
58e0b166cb move comment to safe place 2026-02-02 21:09:19 -07:00
Matt Hill
2a678bb017 fix warning and skip raspberrypi builds for now 2026-02-02 20:16:41 -07:00
Matt Hill
5664456b77 fix for buildjet 2026-02-02 18:51:11 -07:00
Matt Hill
3685b7e57e fix workflows 2026-02-02 18:37:13 -07:00
Matt Hill
989d5f73b1 fix --arch flag to fall back to emulation when native image unavailab… (#3108)
* fix --arch flag to fall back to emulation when native image unavailable, always infer hardware requirement for arch

* better handling of arch filter

* dont cancel in-progress commit workflows and abstract common setup

* cli improvements

fix group handling

* fix cli publish

* alpha.19

---------

Co-authored-by: Aiden McClelland <me@drbonez.dev>
2026-02-03 00:56:59 +00:00
333 changed files with 8186 additions and 6155 deletions

5
.claude/settings.json Normal file
View File

@@ -0,0 +1,5 @@
{
"attribution": {
"commit": ""
}
}

81
.github/actions/setup-build/action.yml vendored Normal file
View File

@@ -0,0 +1,81 @@
name: Setup Build Environment
description: Common build environment setup steps
inputs:
nodejs-version:
description: Node.js version
required: true
setup-python:
description: Set up Python
required: false
default: "false"
setup-docker:
description: Set up Docker QEMU and Buildx
required: false
default: "true"
setup-sccache:
description: Configure sccache for GitHub Actions
required: false
default: "true"
free-space:
description: Remove unnecessary packages to free disk space
required: false
default: "true"
runs:
using: composite
steps:
- name: Free disk space
if: inputs.free-space == 'true'
shell: bash
run: |
sudo apt-get remove --purge -y azure-cli || true
sudo apt-get remove --purge -y firefox || true
sudo apt-get remove --purge -y ghc-* || true
sudo apt-get remove --purge -y google-cloud-sdk || true
sudo apt-get remove --purge -y google-chrome-stable || true
sudo apt-get remove --purge -y powershell || true
sudo apt-get remove --purge -y php* || true
sudo apt-get remove --purge -y ruby* || true
sudo apt-get remove --purge -y mono-* || true
sudo apt-get autoremove -y
sudo apt-get clean
sudo rm -rf /usr/lib/jvm
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /usr/local/lib/android
sudo rm -rf /usr/share/dotnet
sudo rm -rf /usr/share/swift
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
# BuildJet runners lack /opt/hostedtoolcache, which setup-python and setup-qemu expect
- name: Ensure hostedtoolcache exists
shell: bash
run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
- name: Set up Python
if: inputs.setup-python == 'true'
uses: actions/setup-python@v5
with:
python-version: "3.x"
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.nodejs-version }}
cache: npm
cache-dependency-path: "**/package-lock.json"
- name: Set up Docker QEMU
if: inputs.setup-docker == 'true'
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: inputs.setup-docker == 'true'
uses: docker/setup-buildx-action@v3
- name: Configure sccache
if: inputs.setup-sccache == 'true'
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');

View File

@@ -38,7 +38,7 @@ on:
- next/* - next/*
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.head.sha || github.sha }} group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true cancel-in-progress: true
env: env:
@@ -48,6 +48,7 @@ env:
jobs: jobs:
compile: compile:
name: Build Debian Package name: Build Debian Package
if: github.event.pull_request.draft != true
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
@@ -64,50 +65,15 @@ jobs:
}} }}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }} runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps: steps:
- name: Cleaning up unnecessary files - name: Mount tmpfs
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
run: sudo mount -t tmpfs tmpfs .
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- uses: actions/setup-node@v4
with: with:
node-version: ${{ env.NODEJS_VERSION }} nodejs-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make - name: Make
run: TARGET=${{ matrix.triple }} make cli run: TARGET=${{ matrix.triple }} make cli

View File

@@ -36,7 +36,7 @@ on:
- next/* - next/*
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.head.sha || github.sha }} group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true cancel-in-progress: true
env: env:
@@ -46,6 +46,7 @@ env:
jobs: jobs:
compile: compile:
name: Build Debian Package name: Build Debian Package
if: github.event.pull_request.draft != true
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
@@ -60,50 +61,15 @@ jobs:
}} }}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }} runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps: steps:
- name: Cleaning up unnecessary files - name: Mount tmpfs
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
run: sudo mount -t tmpfs tmpfs .
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- uses: actions/setup-node@v4
with: with:
node-version: ${{ env.NODEJS_VERSION }} nodejs-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make - name: Make
run: make registry-deb run: make registry-deb

View File

@@ -36,7 +36,7 @@ on:
- next/* - next/*
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.head.sha || github.sha }} group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true cancel-in-progress: true
env: env:
@@ -46,6 +46,7 @@ env:
jobs: jobs:
compile: compile:
name: Build Debian Package name: Build Debian Package
if: github.event.pull_request.draft != true
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
@@ -60,50 +61,15 @@ jobs:
}} }}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }} runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps: steps:
- name: Cleaning up unnecessary files - name: Mount tmpfs
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
run: sudo mount -t tmpfs tmpfs .
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- uses: actions/setup-node@v4
with: with:
node-version: ${{ env.NODEJS_VERSION }} nodejs-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make - name: Make
run: make tunnel-deb run: make tunnel-deb

View File

@@ -27,7 +27,7 @@ on:
- x86_64-nonfree - x86_64-nonfree
- aarch64 - aarch64
- aarch64-nonfree - aarch64-nonfree
- raspberrypi # - raspberrypi
- riscv64 - riscv64
deploy: deploy:
type: choice type: choice
@@ -46,7 +46,7 @@ on:
- next/* - next/*
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.head.sha || github.sha }} group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true cancel-in-progress: true
env: env:
@@ -56,6 +56,7 @@ env:
jobs: jobs:
compile: compile:
name: Compile Base Binaries name: Compile Base Binaries
if: github.event.pull_request.draft != true
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
@@ -90,54 +91,16 @@ jobs:
)[github.event.inputs.runner == 'fast'] )[github.event.inputs.runner == 'fast']
}} }}
steps: steps:
- name: Cleaning up unnecessary files - name: Mount tmpfs
run: |
sudo apt-get remove --purge -y azure-cli || true
sudo apt-get remove --purge -y firefox || true
sudo apt-get remove --purge -y ghc-* || true
sudo apt-get remove --purge -y google-cloud-sdk || true
sudo apt-get remove --purge -y google-chrome-stable || true
sudo apt-get remove --purge -y powershell || true
sudo apt-get remove --purge -y php* || true
sudo apt-get remove --purge -y ruby* || true
sudo apt-get remove --purge -y mono-* || true
sudo apt-get autoremove -y
sudo apt-get clean
sudo rm -rf /usr/lib/jvm # All JDKs
sudo rm -rf /usr/local/.ghcup # Haskell toolchain
sudo rm -rf /usr/local/lib/android # Android SDK/NDK, emulator
sudo rm -rf /usr/share/dotnet # .NET SDKs
sudo rm -rf /usr/share/swift # Swift toolchain (if present)
sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
run: sudo mount -t tmpfs tmpfs .
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- name: Set up Python
uses: actions/setup-python@v5
with: with:
python-version: "3.x" nodejs-version: ${{ env.NODEJS_VERSION }}
setup-python: "true"
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make - name: Make
run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar
@@ -155,13 +118,14 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
# TODO: re-add "raspberrypi" to the platform list below
platform: >- platform: >-
${{ ${{
fromJson( fromJson(
format( format(
'[ '[
["{0}"], ["{0}"],
["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64", "raspberrypi"] ["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64"]
]', ]',
github.event.inputs.platform || 'ALL' github.event.inputs.platform || 'ALL'
) )
@@ -225,6 +189,10 @@ jobs:
sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.) sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
if: ${{ github.event.inputs.runner != 'fast' }} if: ${{ github.event.inputs.runner != 'fast' }}
# BuildJet runners lack /opt/hostedtoolcache, which setup-qemu expects
- name: Ensure hostedtoolcache exists
run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
- name: Set up docker QEMU - name: Set up docker QEMU
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v3

View File

@@ -11,7 +11,7 @@ on:
- next/* - next/*
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.head.sha || github.sha }} group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true cancel-in-progress: true
env: env:
@@ -21,15 +21,18 @@ env:
jobs: jobs:
test: test:
name: Run Automated Tests name: Run Automated Tests
if: github.event.pull_request.draft != true
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- uses: actions/setup-node@v4
with: with:
node-version: ${{ env.NODEJS_VERSION }} nodejs-version: ${{ env.NODEJS_VERSION }}
free-space: "false"
setup-docker: "false"
setup-sccache: "false"
- name: Build And Run Tests - name: Build And Run Tests
run: make test run: make test

4
.gitignore vendored
View File

@@ -19,4 +19,6 @@ secrets.db
/compiled.tar /compiled.tar
/compiled-*.tar /compiled-*.tar
/build/lib/firmware /build/lib/firmware
tmp tmp
web/.i18n-checked
agents/USER.md

146
CLAUDE.md Normal file
View File

@@ -0,0 +1,146 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
StartOS is an open-source Linux distribution for running personal servers. It manages discovery, installation, network configuration, backups, and health monitoring of self-hosted services.
**Tech Stack:**
- Backend: Rust (async/Tokio, Axum web framework)
- Frontend: Angular 20 + TypeScript + TaigaUI
- Container runtime: Node.js/TypeScript with LXC
- Database/State: Patch-DB (git submodule) - storage layer with reactive frontend sync
- API: JSON-RPC via rpc-toolkit (see `agents/rpc-toolkit.md`)
- Auth: Password + session cookie, public/private key signatures, local authcookie (see `core/src/middleware/auth/`)
## Build & Development
See [CONTRIBUTING.md](CONTRIBUTING.md) for:
- Environment setup and requirements
- Build commands and make targets
- Testing and formatting commands
- Environment variables
**Quick reference:**
```bash
. ./devmode.sh # Enable dev mode
make update-startbox REMOTE=start9@<ip> # Fastest iteration (binary + UI)
make test-core # Run Rust tests
```
## Architecture
### Core (`/core`)
The Rust backend daemon. Main binaries:
- `startbox` - Main daemon (runs as `startd`)
- `start-cli` - CLI interface
- `start-container` - Runs inside LXC containers; communicates with host and manages subcontainers
- `registrybox` - Registry daemon
- `tunnelbox` - VPN/tunnel daemon
**Key modules:**
- `src/context/` - Context types (RpcContext, CliContext, InitContext, DiagnosticContext)
- `src/service/` - Service lifecycle management with actor pattern (`service_actor.rs`)
- `src/db/model/` - Patch-DB models (`public.rs` synced to frontend, `private.rs` backend-only)
- `src/net/` - Networking (DNS, ACME, WiFi, Tor via Arti, WireGuard)
- `src/s9pk/` - S9PK package format (merkle archive)
- `src/registry/` - Package registry management
**RPC Pattern:** See `agents/rpc-toolkit.md`
### Web (`/web`)
Angular projects sharing common code:
- `projects/ui/` - Main admin interface
- `projects/setup-wizard/` - Initial setup
- `projects/start-tunnel/` - VPN management UI
- `projects/shared/` - Common library (API clients, components)
- `projects/marketplace/` - Service discovery
**Development:**
```bash
cd web
npm ci
npm run start:ui # Dev server with mocks
npm run build:ui # Production build
npm run check # Type check all projects
```
### Container Runtime (`/container-runtime`)
Node.js runtime that manages service containers via RPC. See `RPCSpec.md` for protocol.
**Container Architecture:**
```
LXC Container (uniform base for all services)
└── systemd
└── container-runtime.service
└── Loads /usr/lib/startos/package/index.js (from s9pk javascript.squashfs)
└── Package JS launches subcontainers (from images in s9pk)
```
The container runtime communicates with the host via JSON-RPC over Unix socket. Package JavaScript must export functions conforming to the `ABI` type defined in `sdk/base/lib/types.ts`.
**`/media/startos/` directory (mounted by host into container):**
| Path | Description |
|------|-------------|
| `volumes/<name>/` | Package data volumes (id-mapped, persistent) |
| `assets/` | Read-only assets from s9pk `assets.squashfs` |
| `images/<name>/` | Container images (squashfs, used for subcontainers) |
| `images/<name>.env` | Environment variables for image |
| `images/<name>.json` | Image metadata |
| `backup/` | Backup mount point (mounted during backup operations) |
| `rpc/service.sock` | RPC socket (container runtime listens here) |
| `rpc/host.sock` | Host RPC socket (for effects callbacks to host) |
**S9PK Structure:** See `agents/s9pk-structure.md`
### SDK (`/sdk`)
TypeScript SDK for packaging services (`@start9labs/start-sdk`).
- `base/` - Core types, ABI definitions, effects interface (`@start9labs/start-sdk-base`)
- `package/` - Full SDK for package developers, re-exports base
### Patch-DB (`/patch-db`)
Git submodule providing diff-based state synchronization. Changes to `db/model/public.rs` automatically sync to the frontend.
**Key patterns:**
- `db.peek().await` - Get a read-only snapshot of the database state
- `db.mutate(|db| { ... }).await` - Apply mutations atomically, returns `MutateResult`
- `#[derive(HasModel)]` - Derive macro for types stored in the database, generates typed accessors
**Generated accessor types** (from `HasModel` derive):
- `as_field()` - Immutable reference: `&Model<T>`
- `as_field_mut()` - Mutable reference: `&mut Model<T>`
- `into_field()` - Owned value: `Model<T>`
**`Model<T>` APIs** (from `db/prelude.rs`):
- `.de()` - Deserialize to `T`
- `.ser(&value)` - Serialize from `T`
- `.mutate(|v| ...)` - Deserialize, mutate, reserialize
- For maps: `.keys()`, `.as_idx(&key)`, `.as_idx_mut(&key)`, `.insert()`, `.remove()`, `.contains_key()`
## Supplementary Documentation
The `agents/` directory contains detailed documentation for AI assistants:
- `TODO.md` - Pending tasks for AI agents (check this first, remove items when completed)
- `USER.md` - Current user identifier (gitignored, see below)
- `rpc-toolkit.md` - JSON-RPC patterns and handler configuration
- `core-rust-patterns.md` - Common utilities and patterns for Rust code in `/core` (guard pattern, mount guards, etc.)
- `s9pk-structure.md` - S9PK package format structure
- `i18n-patterns.md` - Internationalization key conventions and usage in `/core`
### Session Startup
On startup:
1. **Check for `agents/USER.md`** - If it doesn't exist, prompt the user for their name/identifier and create it. This file is gitignored since it varies per developer.
2. **Check `agents/TODO.md` for relevant tasks** - Show TODOs that either:
- Have no `@username` tag (relevant to everyone)
- Are tagged with the current user's identifier
Skip TODOs tagged with a different user.
3. **Ask "What would you like to do today?"** - Offer options for each relevant TODO item, plus "Something else" for other requests.

View File

@@ -11,123 +11,190 @@ This guide is for contributing to StartOS. If you are interested in packagin
```bash ```bash
/ /
├── assets/ ├── assets/ # Screenshots for README
├── container-runtime/ ├── build/ # Auxiliary files and scripts for deployed images
├── core/ ├── container-runtime/ # Node.js program managing package containers
├── build/ ├── core/ # Rust backend: API, daemon (startd), CLI (start-cli)
├── debian/ ├── debian/ # Debian package maintainer scripts
├── web/ ├── image-recipe/ # Scripts for building StartOS images
├── image-recipe/ ├── patch-db/ # (submodule) Diff-based data store for frontend sync
├── patch-db ├── sdk/ # TypeScript SDK for building StartOS packages
└── sdk/ └── web/ # Web UIs (Angular)
``` ```
#### assets See component READMEs for details:
- [`core`](core/README.md)
screenshots for the StartOS README - [`web`](web/README.md)
- [`build`](build/README.md)
#### container-runtime - [`patch-db`](https://github.com/Start9Labs/patch-db)
A NodeJS program that dynamically loads maintainer scripts and communicates with the OS to manage packages
#### core
An API, daemon (startd), and CLI (start-cli) that together provide the core functionality of StartOS.
#### build
Auxiliary files and scripts to include in deployed StartOS images
#### debian
Maintainer scripts for the StartOS Debian package
#### web
Web UIs served under various conditions and used to interact with StartOS APIs.
#### image-recipe
Scripts for building StartOS images
#### patch-db (submodule)
A diff based data store used to synchronize data between the web interfaces and server.
#### sdk
A typescript sdk for building start-os packages
## Environment Setup ## Environment Setup
#### Clone the StartOS repository
```sh ```sh
git clone https://github.com/Start9Labs/start-os.git --recurse-submodules git clone https://github.com/Start9Labs/start-os.git --recurse-submodules
cd start-os cd start-os
``` ```
#### Continue to your project of interest for additional instructions: ### Development Mode
- [`core`](core/README.md) For faster iteration during development:
- [`web-interfaces`](web-interfaces/README.md)
- [`build`](build/README.md) ```sh
- [`patch-db`](https://github.com/Start9Labs/patch-db) . ./devmode.sh
```
This sets `ENVIRONMENT=dev` and `GIT_BRANCH_AS_HASH=1` to prevent rebuilds on every commit.
## Building ## Building
This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components. To build any specific component, simply run `make <TARGET>` replacing `<TARGET>` with the name of the target you'd like to build All builds can be performed on any operating system that can run Docker.
This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components.
### Requirements ### Requirements
- [GNU Make](https://www.gnu.org/software/make/) - [GNU Make](https://www.gnu.org/software/make/)
- [Docker](https://docs.docker.com/get-docker/) - [Docker](https://docs.docker.com/get-docker/) or [Podman](https://podman.io/)
- [NodeJS v20.16.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) - [NodeJS v20.16.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
- [sed](https://www.gnu.org/software/sed/) - [Rust](https://rustup.rs/) (nightly for formatting)
- [grep](https://www.gnu.org/software/grep/) - [sed](https://www.gnu.org/software/sed/), [grep](https://www.gnu.org/software/grep/), [awk](https://www.gnu.org/software/gawk/)
- [awk](https://www.gnu.org/software/gawk/)
- [jq](https://jqlang.github.io/jq/) - [jq](https://jqlang.github.io/jq/)
- [gzip](https://www.gnu.org/software/gzip/) - [gzip](https://www.gnu.org/software/gzip/), [brotli](https://github.com/google/brotli)
- [brotli](https://github.com/google/brotli)
### Environment variables ### Environment Variables
- `PLATFORM`: which platform you would like to build for. Must be one of `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `raspberrypi` | Variable | Description |
- NOTE: `nonfree` images are for including `nonfree` firmware packages in the built ISO |----------|-------------|
- `ENVIRONMENT`: a hyphen separated set of feature flags to enable | `PLATFORM` | Target platform: `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `riscv64`, `raspberrypi` |
- `dev`: enables password ssh (INSECURE!) and does not compress frontends | `ENVIRONMENT` | Hyphen-separated feature flags (see below) |
- `unstable`: enables assertions that will cause errors on unexpected inconsistencies that are undesirable in production use either for performance or reliability reasons | `PROFILE` | Build profile: `release` (default) or `dev` |
- `docker`: use `docker` instead of `podman` | `GIT_BRANCH_AS_HASH` | Set to `1` to use git branch name as version hash (avoids rebuilds) |
- `GIT_BRANCH_AS_HASH`: set to `1` to use the current git branch name as the git hash so that the project does not need to be rebuilt on each commit
### Useful Make Targets **ENVIRONMENT flags:**
- `dev` - Enables password SSH before setup, skips frontend compression
- `unstable` - Enables assertions and debugging with performance penalty
- `console` - Enables tokio-console for async debugging
**Platform notes:**
- `-nonfree` variants include proprietary firmware and drivers
- `raspberrypi` includes non-free components by necessity
- Platform is remembered between builds if not specified
### Make Targets
#### Building
| Target | Description |
|--------|-------------|
| `iso` | Create full `.iso` image (not for raspberrypi) |
| `img` | Create full `.img` image (raspberrypi only) |
| `deb` | Build Debian package |
| `all` | Build all Rust binaries |
| `uis` | Build all web UIs |
| `ui` | Build main UI only |
| `ts-bindings` | Generate TypeScript bindings from Rust types |
#### Deploying to Device
For devices on the same network:
| Target | Description |
|--------|-------------|
| `update-startbox REMOTE=start9@<ip>` | Deploy binary + UI only (fastest) |
| `update-deb REMOTE=start9@<ip>` | Deploy full Debian package |
| `update REMOTE=start9@<ip>` | OTA-style update |
| `reflash REMOTE=start9@<ip>` | Reflash as if using live ISO |
| `update-overlay REMOTE=start9@<ip>` | Deploy to in-memory overlay (reverts on reboot) |
For devices on different networks (uses [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)):
| Target | Description |
|--------|-------------|
| `wormhole` | Send startbox binary |
| `wormhole-deb` | Send Debian package |
| `wormhole-squashfs` | Send squashfs image |
#### Other
| Target | Description |
|--------|-------------|
| `format` | Run code formatting (Rust nightly required) |
| `test` | Run all automated tests |
| `test-core` | Run Rust tests |
| `test-sdk` | Run SDK tests |
| `test-container-runtime` | Run container runtime tests |
| `clean` | Delete all compiled artifacts |
## Testing
```bash
make test # All tests
make test-core # Rust tests (via ./core/run-tests.sh)
make test-sdk # SDK tests
make test-container-runtime # Container runtime tests
# Run specific Rust test
cd core && cargo test <test_name> --features=test
```
## Code Formatting
```bash
# Rust (requires nightly)
make format
# TypeScript/HTML/SCSS (web)
cd web && npm run format
```
## Code Style Guidelines
### Formatting
Run the formatters before committing. Configuration is handled by `rustfmt.toml` (Rust) and prettier configs (TypeScript).
### Documentation & Comments
**Rust:**
- Add doc comments (`///`) to public APIs, structs, and non-obvious functions
- Use `//` comments sparingly for complex logic that isn't self-evident
- Prefer self-documenting code (clear naming, small functions) over comments
**TypeScript:**
- Document exported functions and complex types with JSDoc
- Keep comments focused on "why" rather than "what"
**General:**
- Don't add comments that just restate the code
- Update or remove comments when code changes
- TODOs should include context: `// TODO(username): reason`
### Commit Messages
Use [Conventional Commits](https://www.conventionalcommits.org/):
```
<type>(<scope>): <description>
[optional body]
[optional footer]
```
**Types:**
- `feat` - New feature
- `fix` - Bug fix
- `docs` - Documentation only
- `style` - Formatting, no code change
- `refactor` - Code change that neither fixes a bug nor adds a feature
- `test` - Adding or updating tests
- `chore` - Build process, dependencies, etc.
**Examples:**
```
feat(web): add dark mode toggle
fix(core): resolve race condition in service startup
docs: update CONTRIBUTING.md with style guidelines
refactor(sdk): simplify package validation logic
```
- `iso`: Create a full `.iso` image
- Only possible from Debian
- Not available for `PLATFORM=raspberrypi`
- Additional Requirements:
- [debspawn](https://github.com/lkhq/debspawn)
- `img`: Create a full `.img` image
- Only possible from Debian
- Only available for `PLATFORM=raspberrypi`
- Additional Requirements:
- [debspawn](https://github.com/lkhq/debspawn)
- `format`: Run automatic code formatting for the project
- Additional Requirements:
- [rust](https://rustup.rs/)
- `test`: Run automated tests for the project
- Additional Requirements:
- [rust](https://rustup.rs/)
- `update`: Deploy the current working project to a device over ssh as if through an over-the-air update
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
- `reflash`: Deploy the current working project to a device over ssh as if using a live `iso` image to reflash it
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
- `update-overlay`: Deploy the current working project to a device over ssh to the in-memory overlay without restarting it
- WARNING: changes will be reverted after the device is rebooted
- WARNING: changes to `init` will not take effect as the device is already initialized
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
- `wormhole`: Deploy the `startbox` to a device using [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
- When the build is complete, it will emit a command to paste into the shell of the device to upgrade it
- Additional Requirements:
- [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
- `clean`: Delete all compiled artifacts

9
agents/TODO.md Normal file
View File

@@ -0,0 +1,9 @@
# AI Agent TODOs
Pending tasks for AI agents. Remove items when completed.
## Unreviewed CLAUDE.md Sections
- [ ] Architecture - Web (`/web`) - @MattDHill

View File

@@ -0,0 +1,249 @@
# Utilities & Patterns
This document covers common utilities and patterns used throughout the StartOS codebase.
## Util Module (`core/src/util/`)
The `util` module contains reusable utilities. Key submodules:
| Module | Purpose |
|--------|---------|
| `actor/` | Actor pattern implementation for concurrent state management |
| `collections/` | Custom collection types |
| `crypto.rs` | Cryptographic utilities (encryption, hashing) |
| `future.rs` | Future/async utilities |
| `io.rs` | File I/O helpers (create_file, canonicalize, etc.) |
| `iter.rs` | Iterator extensions |
| `net.rs` | Network utilities |
| `rpc.rs` | RPC helpers |
| `rpc_client.rs` | RPC client utilities |
| `serde.rs` | Serialization helpers (Base64, display/fromstr, etc.) |
| `sync.rs` | Synchronization primitives (SyncMutex, etc.) |
## Command Invocation (`Invoke` trait)
The `Invoke` trait provides a clean way to run external commands with error handling:
```rust
use crate::util::Invoke;
// Simple invocation
tokio::process::Command::new("ls")
.arg("-la")
.invoke(ErrorKind::Filesystem)
.await?;
// With timeout
tokio::process::Command::new("slow-command")
.timeout(Some(Duration::from_secs(30)))
.invoke(ErrorKind::Timeout)
.await?;
// With input
let mut input = Cursor::new(b"input data");
tokio::process::Command::new("cat")
.input(Some(&mut input))
.invoke(ErrorKind::Filesystem)
.await?;
// Piped commands
tokio::process::Command::new("cat")
.arg("file.txt")
.pipe(&mut tokio::process::Command::new("grep").arg("pattern"))
.invoke(ErrorKind::Filesystem)
.await?;
```
## Guard Pattern
Guards ensure cleanup happens when they go out of scope.
### `GeneralGuard` / `GeneralBoxedGuard`
For arbitrary cleanup actions:
```rust
use crate::util::GeneralGuard;
let guard = GeneralGuard::new(|| {
println!("Cleanup runs on drop");
});
// Do work...
// Explicit drop with action
guard.drop();
// Or skip the action
// guard.drop_without_action();
```
### `FileLock`
File-based locking with automatic unlock:
```rust
use crate::util::FileLock;
let lock = FileLock::new("/path/to/lockfile", true).await?; // blocking=true
// Lock held until dropped or explicitly unlocked
lock.unlock().await?;
```
## Mount Guard Pattern (`core/src/disk/mount/guard.rs`)
RAII guards for filesystem mounts. Ensures filesystems are unmounted when guards are dropped.
### `MountGuard`
Basic mount guard:
```rust
use crate::disk::mount::guard::MountGuard;
use crate::disk::mount::filesystem::{MountType, ReadOnly};
let guard = MountGuard::mount(&filesystem, "/mnt/target", ReadOnly).await?;
// Use the mounted filesystem at guard.path()
do_something(guard.path()).await?;
// Explicit unmount (or auto-unmounts on drop)
guard.unmount(false).await?; // false = don't delete mountpoint
```
### `TmpMountGuard`
Reference-counted temporary mount (mounts to `/media/startos/tmp/`):
```rust
use crate::disk::mount::guard::TmpMountGuard;
use crate::disk::mount::filesystem::ReadOnly;
// Multiple clones share the same mount
let guard1 = TmpMountGuard::mount(&filesystem, ReadOnly).await?;
let guard2 = guard1.clone();
// Mount stays alive while any guard exists
// Auto-unmounts when last guard is dropped
```
### `GenericMountGuard` trait
All mount guards implement this trait:
```rust
pub trait GenericMountGuard: std::fmt::Debug + Send + Sync + 'static {
fn path(&self) -> &Path;
fn unmount(self) -> impl Future<Output = Result<(), Error>> + Send;
}
```
### `SubPath`
Wraps a mount guard to point to a subdirectory:
```rust
use crate::disk::mount::guard::SubPath;
let mount = TmpMountGuard::mount(&filesystem, ReadOnly).await?;
let subdir = SubPath::new(mount, "data/subdir");
// subdir.path() returns the full path including subdirectory
```
## FileSystem Implementations (`core/src/disk/mount/filesystem/`)
Various filesystem types that can be mounted:
| Type | Description |
|------|-------------|
| `bind.rs` | Bind mounts |
| `block_dev.rs` | Block device mounts |
| `cifs.rs` | CIFS/SMB network shares |
| `ecryptfs.rs` | Encrypted filesystem |
| `efivarfs.rs` | EFI variables |
| `httpdirfs.rs` | HTTP directory as filesystem |
| `idmapped.rs` | ID-mapped mounts |
| `label.rs` | Mount by label |
| `loop_dev.rs` | Loop device mounts |
| `overlayfs.rs` | Overlay filesystem |
## Other Useful Utilities
### `Apply` / `ApplyRef` traits
Fluent method chaining:
```rust
use crate::util::Apply;
let result = some_value
.apply(|v| transform(v))
.apply(|v| another_transform(v));
```
### `Container<T>`
Async-safe optional container:
```rust
use crate::util::Container;
let container = Container::new(None);
container.set(value).await;
let taken = container.take().await;
```
### `HashWriter<H, W>`
Write data while computing hash:
```rust
use crate::util::HashWriter;
use sha2::Sha256;
let writer = HashWriter::new(Sha256::new(), file);
// Write data...
let (hasher, file) = writer.finish();
let hash = hasher.finalize();
```
### `Never` type
Uninhabited type for impossible cases:
```rust
use crate::util::Never;
fn impossible() -> Never {
// This function can never return
}
let never: Never = impossible();
never.absurd::<String>() // Can convert to any type
```
### `MaybeOwned<'a, T>`
Either borrowed or owned data:
```rust
use crate::util::MaybeOwned;
fn accept_either(data: MaybeOwned<'_, String>) {
// Use &*data to access the value
}
accept_either(MaybeOwned::from(&existing_string));
accept_either(MaybeOwned::from(owned_string));
```
### `new_guid()`
Generate a random GUID:
```rust
use crate::util::new_guid;
let guid = new_guid(); // Returns InternedString
```

301
agents/exver.md Normal file
View File

@@ -0,0 +1,301 @@
# exver — Extended Versioning
Extended semver supporting **downstream versioning** (wrapper updates independent of upstream) and **flavors** (package fork variants).
Two implementations exist:
- **Rust crate** (`exver`) — used in `core/`. Source: https://github.com/Start9Labs/exver-rs
- **TypeScript** (`sdk/base/lib/exver/index.ts`) — used in `sdk/` and `web/`
Both parse the same string format and agree on `satisfies` semantics.
## Version Format
An **ExtendedVersion** string looks like:
```
[#flavor:]upstream:downstream
```
- **upstream** — the original package version (semver-style: `1.2.3`, `1.2.3-beta.1`)
- **downstream** — the StartOS wrapper version (incremented independently)
- **flavor** — optional lowercase ASCII prefix for fork variants
Examples:
- `1.2.3:0` — upstream 1.2.3, first downstream release
- `1.2.3:2` — upstream 1.2.3, third downstream release
- `#bitcoin:21.0:1` — bitcoin flavor, upstream 21.0, downstream 1
- `1.0.0-rc.1:0` — upstream with prerelease tag
## Core Types
### `Version`
A semver-style version with arbitrary digit segments and optional prerelease.
**Rust:**
```rust
use exver::Version;
let v = Version::new([1, 2, 3], []); // 1.2.3
let v = Version::new([1, 0], ["beta".into()]); // 1.0-beta
let v: Version = "1.2.3".parse().unwrap();
v.number() // &[1, 2, 3]
v.prerelease() // &[]
```
**TypeScript:**
```typescript
const v = new Version([1, 2, 3], [])
const v = Version.parse("1.2.3")
v.number // number[]
v.prerelease // (string | number)[]
v.compare(other) // 'greater' | 'equal' | 'less'
v.compareForSort(other) // -1 | 0 | 1
```
Default: `0`
### `ExtendedVersion`
The primary version type. Wraps upstream + downstream `Version` plus an optional flavor.
**Rust:**
```rust
use exver::ExtendedVersion;
let ev = ExtendedVersion::new(
Version::new([1, 2, 3], []),
Version::default(), // downstream = 0
);
let ev: ExtendedVersion = "1.2.3:0".parse().unwrap();
ev.flavor() // Option<&str>
ev.upstream() // &Version
ev.downstream() // &Version
// Builder methods (consuming):
ev.with_flavor("bitcoin")
ev.without_flavor()
ev.map_upstream(|v| ...)
ev.map_downstream(|v| ...)
```
**TypeScript:**
```typescript
const ev = new ExtendedVersion(null, upstream, downstream)
const ev = ExtendedVersion.parse("1.2.3:0")
const ev = ExtendedVersion.parseEmver("1.2.3.4") // emver compat
ev.flavor // string | null
ev.upstream // Version
ev.downstream // Version
ev.compare(other) // 'greater' | 'equal' | 'less' | null
ev.equals(other) // boolean
ev.greaterThan(other) // boolean
ev.lessThan(other) // boolean
ev.incrementMajor() // new ExtendedVersion
ev.incrementMinor() // new ExtendedVersion
```
**Ordering:** Versions with different flavors are **not comparable** (`PartialOrd`/`compare` returns `None`/`null`).
Default: `0:0`
### `VersionString` (Rust only, StartOS wrapper)
Defined in `core/src/util/version.rs`. Caches the original string representation alongside the parsed `ExtendedVersion`. Used as the key type in registry version maps.
```rust
use crate::util::VersionString;
let vs: VersionString = "1.2.3:0".parse().unwrap();
let vs = VersionString::from(extended_version);
// Deref to ExtendedVersion:
vs.satisfies(&range);
vs.upstream();
// String access:
vs.as_str(); // &str
AsRef::<str>::as_ref(&vs);
```
`Ord` is implemented with a total ordering — versions with different flavors are ordered by flavor name (unflavored sorts last).
### `VersionRange`
A predicate over `ExtendedVersion`. Supports comparison operators, boolean logic, and flavor constraints.
**Rust:**
```rust
use exver::VersionRange;
// Constructors:
VersionRange::any() // matches everything
VersionRange::none() // matches nothing
VersionRange::exactly(ev) // = ev
VersionRange::anchor(GTE, ev) // >= ev
VersionRange::caret(ev) // ^ev (compatible changes)
VersionRange::tilde(ev) // ~ev (patch-level changes)
// Combinators (smart — eagerly simplify):
VersionRange::and(a, b) // a && b
VersionRange::or(a, b) // a || b
VersionRange::not(a) // !a
// Parsing:
let r: VersionRange = ">=1.0.0:0".parse().unwrap();
let r: VersionRange = "^1.2.3:0".parse().unwrap();
let r: VersionRange = ">=1.0.0 <2.0.0".parse().unwrap(); // implicit AND
let r: VersionRange = ">=1.0.0 || >=2.0.0".parse().unwrap();
let r: VersionRange = "#bitcoin".parse().unwrap(); // flavor match
let r: VersionRange = "*".parse().unwrap(); // any
// Monoid wrappers for folding:
AnyRange // fold with or, empty = None
AllRange // fold with and, empty = Any
```
**TypeScript:**
```typescript
// Constructors:
VersionRange.any()
VersionRange.none()
VersionRange.anchor('=', ev)
VersionRange.anchor('>=', ev)
VersionRange.anchor('^', ev) // ^ and ~ are first-class operators
VersionRange.anchor('~', ev)
VersionRange.flavor(null) // match unflavored versions
VersionRange.flavor("bitcoin") // match #bitcoin versions
// Combinators — static (smart, variadic):
VersionRange.and(a, b, c, ...)
VersionRange.or(a, b, c, ...)
// Combinators — instance (not smart, just wrap):
range.and(other)
range.or(other)
range.not()
// Parsing:
VersionRange.parse(">=1.0.0:0")
VersionRange.parseEmver(">=1.2.3.4") // emver compat
// Analysis (TS only):
range.normalize() // canonical form (see below)
range.satisfiable() // boolean
range.intersects(other) // boolean
```
**Checking satisfaction:**
```rust
// Rust:
version.satisfies(&range) // bool
```
```typescript
// TypeScript:
version.satisfies(range) // boolean
range.satisfiedBy(version) // boolean (convenience)
```
Also available on `Version` (wraps in `ExtendedVersion` with downstream=0).
When no operator is specified in a range string, `^` (caret) is the default.
## Operators
| Syntax | Rust | TS | Meaning |
|--------|------|----|---------|
| `=` | `EQ` | `'='` | Equal |
| `!=` | `NEQ` | `'!='` | Not equal |
| `>` | `GT` | `'>'` | Greater than |
| `>=` | `GTE` | `'>='` | Greater than or equal |
| `<` | `LT` | `'<'` | Less than |
| `<=` | `LTE` | `'<='` | Less than or equal |
| `^` | expanded to `And(GTE, LT)` | `'^'` | Compatible (first non-zero digit unchanged) |
| `~` | expanded to `And(GTE, LT)` | `'~'` | Patch-level (minor unchanged) |
## Flavor Rules
- Versions with **different flavors** never satisfy comparison operators (except `!=`, which returns true)
- `VersionRange::Flavor(Some("bitcoin"))` matches only `#bitcoin:*` versions
- `VersionRange::Flavor(None)` matches only unflavored versions
- Flavor constraints compose with `and`/`or`/`not` like any other range
## Reduction and Normalization
### Rust: `reduce()` (shallow)
`VersionRange::reduce(self) -> Self` re-applies smart constructor rules to one level of the AST. Useful for simplifying a node that was constructed directly (e.g. deserialized) rather than through the smart constructors.
**Smart constructor rules applied by `and`, `or`, `not`, and `reduce`:**
`and`:
- `and(Any, b) → b`, `and(a, Any) → a`
- `and(None, _) → None`, `and(_, None) → None`
`or`:
- `or(Any, _) → Any`, `or(_, Any) → Any`
- `or(None, b) → b`, `or(a, None) → a`
`not`:
- `not(=v) → !=v`, `not(!=v) → =v`
- `not(and(a, b)) → or(not(a), not(b))` (De Morgan)
- `not(or(a, b)) → and(not(a), not(b))` (De Morgan)
- `not(not(a)) → a`
- `not(Any) → None`, `not(None) → Any`
### TypeScript: `normalize()` (deep, canonical)
`VersionRange.normalize(): VersionRange` in `sdk/base/lib/exver/index.ts` performs full normalization by converting the range AST into a canonical form. This is a deep operation that produces a semantically equivalent but simplified range.
**How it works:**
1. **`tables()`** — Converts the VersionRange AST into truth tables (`VersionRangeTable`). Each table is a number line split at version boundary points, with boolean values for each segment indicating whether versions in that segment satisfy the range. Separate tables are maintained per flavor (and for flavor negations).
2. **`VersionRangeTable.zip(a, b, func)`** — Merges two tables by walking their boundary points in sorted order and applying a boolean function (`&&` or `||`) to combine segment values. Adjacent segments with the same boolean value are collapsed automatically.
3. **`VersionRangeTable.and/or/not`** — Table-level boolean operations. `and` computes the cross-product of flavor tables (since `#a && #b` for different flavors is unsatisfiable). `not` inverts all segment values.
4. **`VersionRangeTable.collapse()`** — Checks if a table is uniformly true or false across all flavors and segments. Returns `true`, `false`, or `null` (mixed).
5. **`VersionRangeTable.minterms()`** — Converts truth tables back into a VersionRange AST in [sum-of-products](https://en.wikipedia.org/wiki/Canonical_normal_form#Minterms) canonical form. Each `true` segment becomes a product term (conjunction of boundary constraints), and all terms are joined with `or`. Adjacent boundary points collapse into `=` anchors.
**Example:** `normalize` can simplify:
- `>=1.0.0:0 && <=1.0.0:0` → `=1.0.0:0`
- `>=2.0.0:0 || >=1.0.0:0` → `>=1.0.0:0`
- `!(!>=1.0.0:0)` → `>=1.0.0:0`
**Also exposes:**
- `satisfiable(): boolean` — returns `true` if there exists any version satisfying the range (checks if `collapse(tables())` is not `false`)
- `intersects(other): boolean` — returns `true` if `and(this, other)` is satisfiable
## API Differences Between Rust and TypeScript
| | Rust | TypeScript |
|-|------|------------|
| **`^` / `~`** | Expanded at construction to `And(GTE, LT)` | First-class operator on `Anchor` |
| **`not()`** | Static, eagerly simplifies (De Morgan, double negation) | Instance method, just wraps |
| **`and()`/`or()`** | Binary static | Both binary instance and variadic static |
| **Normalization** | `reduce()` — shallow, one AST level | `normalize()` — deep canonical form via truth tables |
| **Satisfiability** | Not available | `satisfiable()` and `intersects(other)` |
| **ExtendedVersion helpers** | `with_flavor()`, `without_flavor()`, `map_upstream()`, `map_downstream()` | `incrementMajor()`, `incrementMinor()`, `greaterThan()`, `lessThan()`, `equals()`, etc. |
| **Monoid wrappers** | `AnyRange` (fold with `or`) and `AllRange` (fold with `and`) | Not present — use variadic static methods |
| **`VersionString`** | Wrapper caching parsed + string form | Not present |
| **Emver compat** | `From<emver::Version>` for `ExtendedVersion` | `ExtendedVersion.parseEmver()`, `VersionRange.parseEmver()` |
## Serde
All types serialize/deserialize as strings (requires `serde` feature, enabled in StartOS):
```json
{
"version": "1.2.3:0",
"targetVersion": ">=1.0.0:0 <2.0.0:0",
"sourceVersion": "^0.3.0:0"
}
```

100
agents/i18n-patterns.md Normal file
View File

@@ -0,0 +1,100 @@
# i18n Patterns in `core/`
## Library & Setup
**Crate:** [`rust-i18n`](https://crates.io/crates/rust-i18n) v3.1.5 (`core/Cargo.toml`)
**Initialization** (`core/src/lib.rs:3`):
```rust
rust_i18n::i18n!("locales", fallback = ["en_US"]);
```
This macro scans `core/locales/` at compile time and embeds all translations as constants.
**Prelude re-export** (`core/src/prelude.rs:4`):
```rust
pub use rust_i18n::t;
```
Most modules import `t!` via the prelude.
## Translation File
**Location:** `core/locales/i18n.yaml`
**Format:** YAML v2 (~755 keys)
**Supported languages:** `en_US`, `de_DE`, `es_ES`, `fr_FR`, `pl_PL`
**Entry structure:**
```yaml
namespace.sub.key-name:
en_US: "English text with %{param}"
de_DE: "German text with %{param}"
# ...
```
## Using `t!()`
```rust
// Simple key
t!("error.unknown")
// With parameter interpolation (%{name} in YAML)
t!("bins.deprecated.renamed", old = old_name, new = new_name)
```
## Key Naming Conventions
Keys use **dot-separated hierarchical namespaces** with **kebab-case** for multi-word segments:
```
<module>.<submodule>.<descriptive-name>
```
Examples:
- `error.incorrect-password` — error kind label
- `bins.start-init.updating-firmware` — startup phase message
- `backup.bulk.complete-title` — backup notification title
- `help.arg.acme-contact` — CLI help text for an argument
- `context.diagnostic.starting-diagnostic-ui` — diagnostic context status
### Top-Level Namespaces
| Namespace | Purpose |
|-----------|---------|
| `error.*` | `ErrorKind` display strings (see `src/error.rs`) |
| `bins.*` | CLI binary messages (deprecated, start-init, startd, etc.) |
| `init.*` | Initialization phase labels |
| `setup.*` | First-run setup messages |
| `context.*` | Context startup messages (diagnostic, setup, CLI) |
| `service.*` | Service lifecycle messages |
| `backup.*` | Backup/restore operation messages |
| `registry.*` | Package registry messages |
| `net.*` | Network-related messages |
| `middleware.*` | Request middleware messages (auth, etc.) |
| `disk.*` | Disk operation messages |
| `lxc.*` | Container management messages |
| `system.*` | System monitoring/metrics messages |
| `notifications.*` | User-facing notification messages |
| `update.*` | OS update messages |
| `util.*` | Utility messages (TUI, RPC) |
| `ssh.*` | SSH operation messages |
| `shutdown.*` | Shutdown-related messages |
| `logs.*` | Log-related messages |
| `auth.*` | Authentication messages |
| `help.*` | CLI help text (`help.arg.<arg-name>`) |
| `about.*` | CLI command descriptions |
## Locale Selection
`core/src/bins/mod.rs:15-36` — `set_locale_from_env()`:
1. Reads `LANG` environment variable
2. Strips `.UTF-8` suffix
3. Exact-matches against available locales, falls back to language-prefix match (e.g. `en_GB` matches `en_US`)
## Adding New Keys
1. Add the key to `core/locales/i18n.yaml` with all 5 language translations
2. Use the `t!("your.key.name")` macro in Rust code
3. Follow existing namespace conventions — match the module path where the key is used
4. Use kebab-case for multi-word segments
5. Translations are validated at compile time

226
agents/rpc-toolkit.md Normal file
View File

@@ -0,0 +1,226 @@
# rpc-toolkit
StartOS uses [rpc-toolkit](https://github.com/Start9Labs/rpc-toolkit) for its JSON-RPC API. This document covers the patterns used in this codebase.
## Overview
The API is JSON-RPC (not REST). All endpoints are RPC methods organized in a hierarchical command structure.
## Handler Functions
There are four types of handler functions, chosen based on the function's characteristics:
### `from_fn_async` - Async handlers
For standard async functions. Most handlers use this.
```rust
pub async fn my_handler(ctx: RpcContext, params: MyParams) -> Result<MyResponse, Error> {
// Can use .await
}
from_fn_async(my_handler)
```
### `from_fn_async_local` - Non-thread-safe async handlers
For async functions that are not `Send` (cannot be safely moved between threads). Use when working with non-thread-safe types.
```rust
pub async fn cli_download(ctx: CliContext, params: Params) -> Result<(), Error> {
// Non-Send async operations
}
from_fn_async_local(cli_download)
```
### `from_fn_blocking` - Sync blocking handlers
For synchronous functions that perform blocking I/O or long computations.
```rust
pub fn query_dns(ctx: RpcContext, params: DnsParams) -> Result<DnsResponse, Error> {
// Blocking operations (file I/O, DNS lookup, etc.)
}
from_fn_blocking(query_dns)
```
### `from_fn` - Sync non-blocking handlers
For pure functions or quick synchronous operations with no I/O.
```rust
pub fn echo(ctx: RpcContext, params: EchoParams) -> Result<String, Error> {
Ok(params.message)
}
from_fn(echo)
```
## ParentHandler
Groups related RPC methods into a hierarchy:
```rust
use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async};
pub fn my_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
.subcommand("list", from_fn_async(list_handler).with_call_remote::<CliContext>())
.subcommand("create", from_fn_async(create_handler).with_call_remote::<CliContext>())
}
```
## Handler Extensions
Chain methods to configure handler behavior.
**Ordering rules:**
1. `with_about()` must come AFTER other CLI modifiers (`no_display()`, `with_custom_display_fn()`, etc.)
2. `with_call_remote()` must be the LAST adapter in the chain
| Method | Purpose |
|--------|---------|
| `.with_metadata("key", Value)` | Attach metadata for middleware |
| `.no_cli()` | RPC-only, not available via CLI |
| `.no_display()` | No CLI output |
| `.with_display_serializable()` | Default JSON/YAML output for CLI |
| `.with_custom_display_fn(\|_, res\| ...)` | Custom CLI output formatting |
| `.with_about("about.description")` | Add help text (i18n key) - **after CLI modifiers** |
| `.with_call_remote::<CliContext>()` | Enable CLI to call remotely - **must be last** |
### Correct ordering example:
```rust
from_fn_async(my_handler)
.with_metadata("sync_db", Value::Bool(true)) // metadata early
.no_display() // CLI modifier
.with_about("about.my-handler") // after CLI modifiers
.with_call_remote::<CliContext>() // always last
```
## Metadata by Middleware
Metadata tags are processed by different middleware. Group them logically:
### Auth Middleware (`middleware/auth/mod.rs`)
| Metadata | Default | Description |
|----------|---------|-------------|
| `authenticated` | `true` | Whether endpoint requires authentication. Set to `false` for public endpoints. |
### Session Auth Middleware (`middleware/auth/session.rs`)
| Metadata | Default | Description |
|----------|---------|-------------|
| `login` | `false` | Special handling for login endpoints (rate limiting, cookie setting) |
| `get_session` | `false` | Inject session ID into params as `__Auth_session` |
### Signature Auth Middleware (`middleware/auth/signature.rs`)
| Metadata | Default | Description |
|----------|---------|-------------|
| `get_signer` | `false` | Inject signer public key into params as `__Auth_signer` |
### Registry Auth (extends Signature Auth)
| Metadata | Default | Description |
|----------|---------|-------------|
| `admin` | `false` | Require admin privileges (signer must be in admin list) |
| `get_device_info` | `false` | Inject device info header for hardware filtering |
### Database Middleware (`middleware/db.rs`)
| Metadata | Default | Description |
|----------|---------|-------------|
| `sync_db` | `false` | Sync database after mutation, add `X-Patch-Sequence` header |
## Context Types
Different contexts for different execution environments:
- `RpcContext` - Web/RPC requests with full service access
- `CliContext` - CLI operations, calls remote RPC
- `InitContext` - During system initialization
- `DiagnosticContext` - Diagnostic/recovery mode
- `RegistryContext` - Registry daemon context
- `EffectContext` - Service effects context (container-to-host calls)
## Parameter Structs
Parameters use derive macros for JSON-RPC, CLI parsing, and TypeScript generation:
```rust
#[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")] // JSON-RPC uses camelCase
#[command(rename_all = "kebab-case")] // CLI uses kebab-case
#[ts(export)] // Generate TypeScript types
pub struct MyParams {
pub package_id: PackageId,
}
```
### Middleware Injection
Auth middleware can inject values into params using special field names:
```rust
#[derive(Deserialize, Serialize, Parser, TS)]
pub struct MyParams {
#[ts(skip)]
#[serde(rename = "__Auth_session")] // Injected by session auth
session: InternedString,
#[ts(skip)]
#[serde(rename = "__Auth_signer")] // Injected by signature auth
signer: AnyVerifyingKey,
#[ts(skip)]
#[serde(rename = "__Auth_userAgent")] // Injected during login
user_agent: Option<String>,
}
```
## Common Patterns
### Adding a New RPC Endpoint
1. Define params struct with `Deserialize, Serialize, Parser, TS`
2. Choose handler type based on sync/async and thread-safety
3. Write handler function taking `(Context, Params) -> Result<Response, Error>`
4. Add to parent handler with appropriate extensions (display modifiers before `with_about`)
5. TypeScript types auto-generated via `make ts-bindings`
### Public (Unauthenticated) Endpoint
```rust
from_fn_async(get_info)
.with_metadata("authenticated", Value::Bool(false))
.with_display_serializable()
.with_about("about.get-info")
.with_call_remote::<CliContext>() // last
```
### Mutating Endpoint with DB Sync
```rust
from_fn_async(update_config)
.with_metadata("sync_db", Value::Bool(true))
.no_display()
.with_about("about.update-config")
.with_call_remote::<CliContext>() // last
```
### Session-Aware Endpoint
```rust
from_fn_async(logout)
.with_metadata("get_session", Value::Bool(true))
.no_display()
.with_about("about.logout")
.with_call_remote::<CliContext>() // last
```
## File Locations
- Handler definitions: Throughout `core/src/` modules
- Main API tree: `core/src/lib.rs` (`main_api()`, `server()`, `package()`)
- Auth middleware: `core/src/middleware/auth/`
- DB middleware: `core/src/middleware/db.rs`
- Context types: `core/src/context/`

122
agents/s9pk-structure.md Normal file
View File

@@ -0,0 +1,122 @@
# S9PK Package Format
S9PK is the package format for StartOS services. Version 2 uses a merkle archive structure for efficient downloading and cryptographic verification.
## File Format
S9PK files begin with a 3-byte header: `0x3b 0x3b 0x02` (magic bytes + version 2).
The archive is cryptographically signed using Ed25519 with prehashed content (SHA-512 over blake3 merkle root hash).
## Archive Structure
```
/
├── manifest.json # Package metadata (required)
├── icon.<ext> # Package icon - any image/* format (required)
├── LICENSE.md # License text (required)
├── dependencies/ # Dependency metadata (optional)
│ └── <package-id>/
│ ├── metadata.json # DependencyMetadata
│ └── icon.<ext> # Dependency icon
├── javascript.squashfs # Package JavaScript code (required)
├── assets.squashfs # Static assets (optional, legacy: assets/ directory)
└── images/ # Container images by architecture
└── <arch>/ # e.g., x86_64, aarch64, riscv64
├── <image-id>.squashfs # Container filesystem
├── <image-id>.json # Image metadata
└── <image-id>.env # Environment variables
```
## Components
### manifest.json
The package manifest contains all metadata:
| Field | Type | Description |
|-------|------|-------------|
| `id` | string | Package identifier (e.g., `bitcoind`) |
| `title` | string | Display name |
| `version` | string | Extended version string |
| `satisfies` | string[] | Version ranges this version satisfies |
| `releaseNotes` | string/object | Release notes (localized) |
| `canMigrateTo` | string | Version range for forward migration |
| `canMigrateFrom` | string | Version range for backward migration |
| `license` | string | License type |
| `wrapperRepo` | string | StartOS wrapper repository URL |
| `upstreamRepo` | string | Upstream project URL |
| `supportSite` | string | Support site URL |
| `marketingSite` | string | Marketing site URL |
| `donationUrl` | string? | Optional donation URL |
| `docsUrl` | string? | Optional documentation URL |
| `description` | object | Short and long descriptions (localized) |
| `images` | object | Image configurations by image ID |
| `volumes` | string[] | Volume IDs for persistent data |
| `alerts` | object | User alerts for lifecycle events |
| `dependencies` | object | Package dependencies |
| `hardwareRequirements` | object | Hardware requirements (arch, RAM, devices) |
| `hardwareAcceleration` | boolean | Whether package uses hardware acceleration |
| `gitHash` | string? | Git commit hash |
| `osVersion` | string | Minimum StartOS version |
| `sdkVersion` | string? | SDK version used to build |
### javascript.squashfs
Contains the package JavaScript that implements the `ABI` interface from `@start9labs/start-sdk-base`. This code runs in the container runtime and manages the package lifecycle.
The squashfs is mounted at `/usr/lib/startos/package/` and the runtime loads `index.js`.
### images/
Container images organized by architecture:
- **`<image-id>.squashfs`** - Container root filesystem
- **`<image-id>.json`** - Image metadata (entrypoint, user, workdir, etc.)
- **`<image-id>.env`** - Environment variables for the container
Images are built from Docker/Podman and converted to squashfs. The `ImageConfig` in manifest specifies:
- `arch` - Supported architectures
- `emulateMissingAs` - Fallback architecture for emulation
- `nvidiaContainer` - Whether to enable NVIDIA container support
### assets.squashfs
Static assets accessible to the package, mounted read-only at `/media/startos/assets/` in the container.
### dependencies/
Metadata for dependencies displayed in the UI:
- `metadata.json` - Just title for now
- `icon.<ext>` - Icon for the dependency
## Merkle Archive
The S9PK uses a merkle tree structure where each file and directory has a blake3 hash. This enables:
1. **Partial downloads** - Download and verify individual files
2. **Integrity verification** - Verify any subset of the archive
3. **Efficient updates** - Only download changed portions
4. **DOS protection** - Size limits enforced before downloading content
Files are sorted by priority for streaming (manifest first, then icon, license, dependencies, javascript, assets, images).
## Building S9PK
Use `start-cli s9pk pack` to build packages:
```bash
start-cli s9pk pack <manifest-path> -o <output.s9pk>
```
Images can be sourced from:
- Docker/Podman build (`--docker-build`)
- Existing Docker tag (`--docker-tag`)
- Pre-built squashfs files
## Related Code
- `core/src/s9pk/v2/mod.rs` - S9pk struct and serialization
- `core/src/s9pk/v2/manifest.rs` - Manifest types
- `core/src/s9pk/v2/pack.rs` - Packing logic
- `core/src/s9pk/merkle_archive/` - Merkle archive implementation

View File

@@ -46,6 +46,7 @@ openssh-server
podman podman
psmisc psmisc
qemu-guest-agent qemu-guest-agent
qemu-user-static
rfkill rfkill
rsync rsync
samba-common-bin samba-common-bin

View File

@@ -111,6 +111,6 @@ if [ "$CHROOT_RES" -eq 0 ]; then
reboot reboot
fi fi
umount -R /media/startos/next umount /media/startos/next
umount /media/startos/upper umount /media/startos/upper
rm -rf /media/startos/upper /media/startos/next rm -rf /media/startos/upper /media/startos/next

View File

@@ -85,9 +85,9 @@ cat << EOF
# ISO Downloads # ISO Downloads
- [x86_64/AMD64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64-nonfree.iso)) - [x86_64/AMD64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64-nonfree.iso))
- [x86_64/AMD64 (Slim/FOSS-Only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers") - [x86_64/AMD64-slim (FOSS-only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers")
- [aarch64/ARM64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64-nonfree.iso)) - [aarch64/ARM64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64-nonfree.iso))
- [aarch64/ARM64 (Slim/FOSS-Only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers") - [aarch64/ARM64-slim (FOSS-Only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers")
- [RISCV64 (RVA23)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_riscv64.iso)) - [RISCV64 (RVA23)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_riscv64.iso))
EOF EOF

View File

@@ -1,16 +1,21 @@
# Container RPC SERVER Specification # Container RPC Server Specification
The container runtime exposes a JSON-RPC server over a Unix socket at `/media/startos/rpc/service.sock`.
## Methods ## Methods
### init ### init
initialize runtime (mount `/proc`, `/sys`, `/dev`, and `/run` to each image in `/media/images`) Initialize the runtime and system.
called after os has mounted js and images to the container #### params
#### args ```ts
{
`[]` id: string,
kind: "install" | "update" | "restore" | null,
}
```
#### response #### response
@@ -18,11 +23,16 @@ called after os has mounted js and images to the container
### exit ### exit
shutdown runtime Shutdown runtime and optionally run exit hooks for a target version.
#### args #### params
`[]` ```ts
{
id: string,
target: string | null, // ExtendedVersion or VersionRange
}
```
#### response #### response
@@ -30,11 +40,11 @@ shutdown runtime
### start ### start
run main method if not already running Run main method if not already running.
#### args #### params
`[]` None
#### response #### response
@@ -42,11 +52,11 @@ run main method if not already running
### stop ### stop
stop main method by sending SIGTERM to child processes, and SIGKILL after timeout Stop main method by sending SIGTERM to child processes, and SIGKILL after timeout.
#### args #### params
`{ timeout: millis }` None
#### response #### response
@@ -54,15 +64,16 @@ stop main method by sending SIGTERM to child processes, and SIGKILL after timeou
### execute ### execute
run a specific package procedure Run a specific package procedure.
#### args #### params
```ts ```ts
{ {
procedure: JsonPath, id: string, // event ID
input: any, procedure: string, // JSON path (e.g., "/backup/create", "/actions/{name}/run")
timeout: millis, input: any,
timeout: number | null,
} }
``` ```
@@ -72,18 +83,64 @@ run a specific package procedure
### sandbox ### sandbox
run a specific package procedure in sandbox mode Run a specific package procedure in sandbox mode. Same interface as `execute`.
#### args UNIMPLEMENTED: this feature is planned but does not exist
#### params
```ts ```ts
{ {
procedure: JsonPath, id: string,
input: any, procedure: string,
timeout: millis, input: any,
timeout: number | null,
} }
``` ```
#### response #### response
`any` `any`
### callback
Handle a callback from an effect.
#### params
```ts
{
id: number,
args: any[],
}
```
#### response
`null` (no response sent)
### eval
Evaluate a script in the runtime context. Used for debugging.
#### params
```ts
{
script: string,
}
```
#### response
`any`
## Procedures
The `execute` and `sandbox` methods route to procedures based on the `procedure` path:
| Procedure | Description |
|-----------|-------------|
| `/backup/create` | Create a backup |
| `/actions/{name}/getInput` | Get input spec for an action |
| `/actions/{name}/run` | Run an action with input |

2
core/Cargo.lock generated
View File

@@ -7817,7 +7817,7 @@ dependencies = [
[[package]] [[package]]
name = "start-os" name = "start-os"
version = "0.4.0-alpha.18" version = "0.4.0-alpha.19"
dependencies = [ dependencies = [
"aes 0.7.5", "aes 0.7.5",
"arti-client", "arti-client",

View File

@@ -15,7 +15,7 @@ license = "MIT"
name = "start-os" name = "start-os"
readme = "README.md" readme = "README.md"
repository = "https://github.com/Start9Labs/start-os" repository = "https://github.com/Start9Labs/start-os"
version = "0.4.0-alpha.18" # VERSION_BUMP version = "0.4.0-alpha.19" # VERSION_BUMP
[lib] [lib]
name = "startos" name = "startos"
@@ -176,6 +176,7 @@ mio = "1"
new_mime_guess = "4" new_mime_guess = "4"
nix = { version = "0.30.1", features = [ nix = { version = "0.30.1", features = [
"fs", "fs",
"hostname",
"mount", "mount",
"net", "net",
"process", "process",

View File

@@ -1843,18 +1843,18 @@ service.mod.failed-to-parse-package-data-entry:
pl_PL: "Nie udało się przeanalizować PackageDataEntry, znaleziono: %{error}" pl_PL: "Nie udało się przeanalizować PackageDataEntry, znaleziono: %{error}"
service.mod.no-matching-subcontainers: service.mod.no-matching-subcontainers:
en_US: "no matching subcontainers are running for %{id}; some possible choices are:\n%{subcontainers}" en_US: "no matching subcontainers are running for %{id}; some possible choices are:"
de_DE: "keine passenden Subcontainer laufen für %{id}; einige mögliche Optionen sind:\n%{subcontainers}" de_DE: "keine passenden Subcontainer laufen für %{id}; einige mögliche Optionen sind:"
es_ES: "no hay subcontenedores coincidentes ejecutándose para %{id}; algunas opciones posibles son:\n%{subcontainers}" es_ES: "no hay subcontenedores coincidentes ejecutándose para %{id}; algunas opciones posibles son:"
fr_FR: "aucun sous-conteneur correspondant n'est en cours d'exécution pour %{id} ; voici quelques choix possibles :\n%{subcontainers}" fr_FR: "aucun sous-conteneur correspondant n'est en cours d'exécution pour %{id} ; voici quelques choix possibles :"
pl_PL: "nie działają pasujące podkontenery dla %{id}; niektóre możliwe wybory to:\n%{subcontainers}" pl_PL: "nie działają pasujące podkontenery dla %{id}; niektóre możliwe wybory to:"
service.mod.multiple-subcontainers-found: service.mod.multiple-subcontainers-found:
en_US: "multiple subcontainers found for %{id}: \n%{subcontainer_ids}" en_US: "multiple subcontainers found for %{id}"
de_DE: "mehrere Subcontainer für %{id} gefunden: \n%{subcontainer_ids}" de_DE: "mehrere Subcontainer für %{id} gefunden"
es_ES: "se encontraron múltiples subcontenedores para %{id}: \n%{subcontainer_ids}" es_ES: "se encontraron múltiples subcontenedores para %{id}"
fr_FR: "plusieurs sous-conteneurs trouvés pour %{id} : \n%{subcontainer_ids}" fr_FR: "plusieurs sous-conteneurs trouvés pour %{id}"
pl_PL: "znaleziono wiele podkontenerów dla %{id}: \n%{subcontainer_ids}" pl_PL: "znaleziono wiele podkontenerów dla %{id}"
service.mod.invalid-byte-length-for-signal: service.mod.invalid-byte-length-for-signal:
en_US: "invalid byte length for signal: %{length}" en_US: "invalid byte length for signal: %{length}"
@@ -3703,6 +3703,20 @@ help.arg.wireguard-config:
fr_FR: "Configuration WireGuard" fr_FR: "Configuration WireGuard"
pl_PL: "Konfiguracja WireGuard" pl_PL: "Konfiguracja WireGuard"
help.s9pk-s3base:
en_US: "Base URL for publishing s9pks"
de_DE: "Basis-URL für die Veröffentlichung von s9pks"
es_ES: "URL base para publicar s9pks"
fr_FR: "URL de base pour publier les s9pks"
pl_PL: "Bazowy URL do publikowania s9pks"
help.s9pk-s3bucket:
en_US: "S3 bucket to publish s9pks to (should correspond to s3base)"
de_DE: "S3-Bucket zum Veröffentlichen von s9pks (sollte mit s3base übereinstimmen)"
es_ES: "Bucket S3 para publicar s9pks (debe corresponder con s3base)"
fr_FR: "Bucket S3 pour publier les s9pks (doit correspondre à s3base)"
pl_PL: "Bucket S3 do publikowania s9pks (powinien odpowiadać s3base)"
# CLI command descriptions (about.*) # CLI command descriptions (about.*)
about.add-address-to-host: about.add-address-to-host:
en_US: "Add an address to this host" en_US: "Add an address to this host"
@@ -4866,6 +4880,13 @@ about.persist-new-notification:
fr_FR: "Persister une nouvelle notification" fr_FR: "Persister une nouvelle notification"
pl_PL: "Utrwal nowe powiadomienie" pl_PL: "Utrwal nowe powiadomienie"
about.publish-s9pk:
en_US: "Publish s9pk to S3 bucket and index on registry"
de_DE: "S9pk in S3-Bucket veröffentlichen und in Registry indizieren"
es_ES: "Publicar s9pk en bucket S3 e indexar en el registro"
fr_FR: "Publier s9pk dans le bucket S3 et indexer dans le registre"
pl_PL: "Opublikuj s9pk do bucketu S3 i zindeksuj w rejestrze"
about.rebuild-service-container: about.rebuild-service-container:
en_US: "Rebuild service container" en_US: "Rebuild service container"
de_DE: "Dienst-Container neu erstellen" de_DE: "Dienst-Container neu erstellen"

View File

@@ -38,6 +38,8 @@ pub struct CliContextSeed {
pub registry_url: Option<Url>, pub registry_url: Option<Url>,
pub registry_hostname: Vec<InternedString>, pub registry_hostname: Vec<InternedString>,
pub registry_listen: Option<SocketAddr>, pub registry_listen: Option<SocketAddr>,
pub s9pk_s3base: Option<Url>,
pub s9pk_s3bucket: Option<InternedString>,
pub tunnel_addr: Option<SocketAddr>, pub tunnel_addr: Option<SocketAddr>,
pub tunnel_listen: Option<SocketAddr>, pub tunnel_listen: Option<SocketAddr>,
pub client: Client, pub client: Client,
@@ -129,6 +131,8 @@ impl CliContext {
.transpose()?, .transpose()?,
registry_hostname: config.registry_hostname.unwrap_or_default(), registry_hostname: config.registry_hostname.unwrap_or_default(),
registry_listen: config.registry_listen, registry_listen: config.registry_listen,
s9pk_s3base: config.s9pk_s3base,
s9pk_s3bucket: config.s9pk_s3bucket,
tunnel_addr: config.tunnel, tunnel_addr: config.tunnel,
tunnel_listen: config.tunnel_listen, tunnel_listen: config.tunnel_listen,
client: { client: {

View File

@@ -68,6 +68,10 @@ pub struct ClientConfig {
pub registry_hostname: Option<Vec<InternedString>>, pub registry_hostname: Option<Vec<InternedString>>,
#[arg(skip)] #[arg(skip)]
pub registry_listen: Option<SocketAddr>, pub registry_listen: Option<SocketAddr>,
#[arg(long, help = "help.s9pk-s3base")]
pub s9pk_s3base: Option<Url>,
#[arg(long, help = "help.s9pk-s3bucket")]
pub s9pk_s3bucket: Option<InternedString>,
#[arg(short = 't', long, help = "help.arg.tunnel-address")] #[arg(short = 't', long, help = "help.arg.tunnel-address")]
pub tunnel: Option<SocketAddr>, pub tunnel: Option<SocketAddr>,
#[arg(skip)] #[arg(skip)]
@@ -89,8 +93,13 @@ impl ContextConfig for ClientConfig {
self.host = self.host.take().or(other.host); self.host = self.host.take().or(other.host);
self.registry = self.registry.take().or(other.registry); self.registry = self.registry.take().or(other.registry);
self.registry_hostname = self.registry_hostname.take().or(other.registry_hostname); self.registry_hostname = self.registry_hostname.take().or(other.registry_hostname);
self.registry_listen = self.registry_listen.take().or(other.registry_listen);
self.s9pk_s3base = self.s9pk_s3base.take().or(other.s9pk_s3base);
self.s9pk_s3bucket = self.s9pk_s3bucket.take().or(other.s9pk_s3bucket);
self.tunnel = self.tunnel.take().or(other.tunnel); self.tunnel = self.tunnel.take().or(other.tunnel);
self.tunnel_listen = self.tunnel_listen.take().or(other.tunnel_listen);
self.proxy = self.proxy.take().or(other.proxy); self.proxy = self.proxy.take().or(other.proxy);
self.socks_listen = self.socks_listen.take().or(other.socks_listen);
self.cookie_path = self.cookie_path.take().or(other.cookie_path); self.cookie_path = self.cookie_path.take().or(other.cookie_path);
self.developer_key_path = self.developer_key_path.take().or(other.developer_key_path); self.developer_key_path = self.developer_key_path.take().or(other.developer_key_path);
} }

View File

@@ -579,6 +579,7 @@ impl RpcContext {
pub async fn call_remote<RemoteContext>( pub async fn call_remote<RemoteContext>(
&self, &self,
method: &str, method: &str,
metadata: OrdMap<&'static str, Value>,
params: Value, params: Value,
) -> Result<Value, RpcError> ) -> Result<Value, RpcError>
where where
@@ -587,7 +588,7 @@ impl RpcContext {
<Self as CallRemote<RemoteContext, Empty>>::call_remote( <Self as CallRemote<RemoteContext, Empty>>::call_remote(
&self, &self,
method, method,
OrdMap::new(), metadata,
params, params,
Empty {}, Empty {},
) )
@@ -596,20 +597,15 @@ impl RpcContext {
pub async fn call_remote_with<RemoteContext, T>( pub async fn call_remote_with<RemoteContext, T>(
&self, &self,
method: &str, method: &str,
metadata: OrdMap<&'static str, Value>,
params: Value, params: Value,
extra: T, extra: T,
) -> Result<Value, RpcError> ) -> Result<Value, RpcError>
where where
Self: CallRemote<RemoteContext, T>, Self: CallRemote<RemoteContext, T>,
{ {
<Self as CallRemote<RemoteContext, T>>::call_remote( <Self as CallRemote<RemoteContext, T>>::call_remote(&self, method, metadata, params, extra)
&self, .await
method,
OrdMap::new(),
params,
extra,
)
.await
} }
} }
impl AsRef<Client> for RpcContext { impl AsRef<Client> for RpcContext {

View File

@@ -4,7 +4,7 @@ use axum::http::StatusCode;
use axum::http::uri::InvalidUri; use axum::http::uri::InvalidUri;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use num_enum::TryFromPrimitive; use num_enum::TryFromPrimitive;
use patch_db::Revision; use patch_db::Value;
use rpc_toolkit::reqwest; use rpc_toolkit::reqwest;
use rpc_toolkit::yajrc::{ use rpc_toolkit::yajrc::{
INVALID_PARAMS_ERROR, INVALID_REQUEST_ERROR, METHOD_NOT_FOUND_ERROR, PARSE_ERROR, RpcError, INVALID_PARAMS_ERROR, INVALID_REQUEST_ERROR, METHOD_NOT_FOUND_ERROR, PARSE_ERROR, RpcError,
@@ -16,6 +16,7 @@ use tokio_rustls::rustls;
use ts_rs::TS; use ts_rs::TS;
use crate::InvalidId; use crate::InvalidId;
use crate::prelude::to_value;
#[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)] #[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)]
#[repr(i32)] #[repr(i32)]
@@ -197,7 +198,7 @@ pub struct Error {
pub source: color_eyre::eyre::Error, pub source: color_eyre::eyre::Error,
pub debug: Option<color_eyre::eyre::Error>, pub debug: Option<color_eyre::eyre::Error>,
pub kind: ErrorKind, pub kind: ErrorKind,
pub revision: Option<Revision>, pub info: Value,
pub task: Option<JoinHandle<()>>, pub task: Option<JoinHandle<()>>,
} }
@@ -228,7 +229,7 @@ impl Error {
source: source.into(), source: source.into(),
debug, debug,
kind, kind,
revision: None, info: Value::Null,
task: None, task: None,
} }
} }
@@ -237,7 +238,7 @@ impl Error {
source: eyre!("{}", self.source), source: eyre!("{}", self.source),
debug: self.debug.as_ref().map(|e| eyre!("{e}")), debug: self.debug.as_ref().map(|e| eyre!("{e}")),
kind: self.kind, kind: self.kind,
revision: self.revision.clone(), info: self.info.clone(),
task: None, task: None,
} }
} }
@@ -245,6 +246,10 @@ impl Error {
self.task = Some(task); self.task = Some(task);
self self
} }
pub fn with_info(mut self, info: Value) -> Self {
self.info = info;
self
}
pub async fn wait(mut self) -> Self { pub async fn wait(mut self) -> Self {
if let Some(task) = &mut self.task { if let Some(task) = &mut self.task {
task.await.log_err(); task.await.log_err();
@@ -423,6 +428,8 @@ impl From<patch_db::value::Error> for Error {
pub struct ErrorData { pub struct ErrorData {
pub details: String, pub details: String,
pub debug: String, pub debug: String,
#[serde(default)]
pub info: Value,
} }
impl Display for ErrorData { impl Display for ErrorData {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -440,6 +447,7 @@ impl From<Error> for ErrorData {
Self { Self {
details: value.to_string(), details: value.to_string(),
debug: format!("{:?}", value), debug: format!("{:?}", value),
info: value.info,
} }
} }
} }
@@ -470,47 +478,40 @@ impl From<&RpcError> for ErrorData {
.or_else(|| d.as_str().map(|s| s.to_owned())) .or_else(|| d.as_str().map(|s| s.to_owned()))
}) })
.unwrap_or_else(|| value.message.clone().into_owned()), .unwrap_or_else(|| value.message.clone().into_owned()),
info: to_value(
&value
.data
.as_ref()
.and_then(|d| d.as_object().and_then(|d| d.get("info"))),
)
.unwrap_or_default(),
} }
} }
} }
impl From<Error> for RpcError { impl From<Error> for RpcError {
fn from(e: Error) -> Self { fn from(e: Error) -> Self {
let mut data_object = serde_json::Map::with_capacity(3); let kind = e.kind;
data_object.insert("details".to_owned(), format!("{}", e.source).into()); let data = ErrorData::from(e);
data_object.insert("debug".to_owned(), format!("{:?}", e.source).into()); RpcError {
data_object.insert( code: kind as i32,
"revision".to_owned(), message: kind.as_str().into(),
match serde_json::to_value(&e.revision) { data: Some(match serde_json::to_value(&data) {
Ok(a) => a, Ok(a) => a,
Err(e) => { Err(e) => {
tracing::warn!("Error serializing revision for Error object: {}", e); tracing::warn!("Error serializing ErrorData object: {}", e);
serde_json::Value::Null serde_json::Value::Null
} }
}, }),
);
RpcError {
code: e.kind as i32,
message: e.kind.as_str().into(),
data: Some(
match serde_json::to_value(&ErrorData {
details: format!("{}", e.source),
debug: format!("{:?}", e.source),
}) {
Ok(a) => a,
Err(e) => {
tracing::warn!("Error serializing revision for Error object: {}", e);
serde_json::Value::Null
}
},
),
} }
} }
} }
impl From<RpcError> for Error { impl From<RpcError> for Error {
fn from(e: RpcError) -> Self { fn from(e: RpcError) -> Self {
let data = ErrorData::from(&e);
let info = data.info.clone();
Error::new( Error::new(
ErrorData::from(&e), data,
if let Ok(kind) = e.code.try_into() { if let Ok(kind) = e.code.try_into() {
kind kind
} else if e.code == METHOD_NOT_FOUND_ERROR.code { } else if e.code == METHOD_NOT_FOUND_ERROR.code {
@@ -524,6 +525,7 @@ impl From<RpcError> for Error {
ErrorKind::Unknown ErrorKind::Unknown
}, },
) )
.with_info(info)
} }
} }
@@ -606,7 +608,7 @@ where
kind, kind,
source, source,
debug, debug,
revision: None, info: Value::Null,
task: None, task: None,
} }
}) })

View File

@@ -131,9 +131,13 @@ pub async fn install(
let package: GetPackageResponse = from_value( let package: GetPackageResponse = from_value(
ctx.call_remote_with::<RegistryContext, _>( ctx.call_remote_with::<RegistryContext, _>(
"package.get", "package.get",
[("get_device_info", Value::Bool(true))]
.into_iter()
.collect(),
json!({ json!({
"id": id, "id": id,
"targetVersion": VersionRange::exactly(version.deref().clone()), "targetVersion": VersionRange::exactly(version.deref().clone()),
"otherVersions": "none",
}), }),
RegistryUrlParams { RegistryUrlParams {
registry: registry.clone(), registry: registry.clone(),
@@ -481,7 +485,7 @@ pub async fn cli_install(
let mut packages: GetPackageResponse = from_value( let mut packages: GetPackageResponse = from_value(
ctx.call_remote::<RegistryContext>( ctx.call_remote::<RegistryContext>(
"package.get", "package.get",
json!({ "id": &id, "targetVersion": version, "sourceVersion": source_version }), json!({ "id": &id, "targetVersion": version, "sourceVersion": source_version, "otherVersions": "none" }),
) )
.await?, .await?,
)?; )?;

View File

@@ -540,7 +540,10 @@ pub fn package<C: Context>() -> ParentHandler<C> {
.with_about("about.execute-commands-container") .with_about("about.execute-commands-container")
.no_cli(), .no_cli(),
) )
.subcommand("attach", from_fn_async(service::cli_attach).no_display()) .subcommand(
"attach",
from_fn_async_local(service::cli_attach).no_display(),
)
.subcommand( .subcommand(
"host", "host",
net::host::host_api::<C>().with_about("about.manage-network-hosts-package"), net::host::host_api::<C>().with_about("about.manage-network-hosts-package"),

View File

@@ -135,20 +135,24 @@ pub struct CliAddPackageParams {
} }
pub async fn cli_add_package( pub async fn cli_add_package(
HandlerArgs { ctx: CliContext,
context: ctx, CliAddPackageParams {
parent_method, file,
method, url,
params: no_verify,
CliAddPackageParams { }: CliAddPackageParams,
file,
url,
no_verify,
},
..
}: HandlerArgs<CliContext, CliAddPackageParams>,
) -> Result<(), Error> { ) -> Result<(), Error> {
let s9pk = S9pk::open(&file, None).await?; let s9pk = S9pk::open(&file, None).await?;
cli_add_package_impl(ctx, s9pk, url, no_verify).await
}
pub async fn cli_add_package_impl(
ctx: CliContext,
s9pk: S9pk,
url: Vec<Url>,
no_verify: bool,
) -> Result<(), Error> {
let manifest = s9pk.as_manifest();
let progress = FullProgressTracker::new(); let progress = FullProgressTracker::new();
let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(1)); let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(1));
@@ -170,8 +174,16 @@ pub async fn cli_add_package(
Some(1), Some(1),
); );
let progress_task = let progress_task = progress.progress_bar_task(&format!(
progress.progress_bar_task(&format!("Adding {} to registry...", file.display())); "Adding {}@{}{} to registry...",
manifest.id,
manifest.version,
manifest
.hardware_requirements
.arch
.as_ref()
.map_or(String::new(), |a| format!(" ({})", a.iter().join("/")))
));
sign_phase.start(); sign_phase.start();
let commitment = s9pk.as_archive().commitment().await?; let commitment = s9pk.as_archive().commitment().await?;
@@ -188,7 +200,7 @@ pub async fn cli_add_package(
index_phase.start(); index_phase.start();
ctx.call_remote::<RegistryContext>( ctx.call_remote::<RegistryContext>(
&parent_method.into_iter().chain(method).join("."), "package.add",
imbl_value::json!({ imbl_value::json!({
"urls": &url, "urls": &url,
"signature": AnySignature::Ed25519(signature), "signature": AnySignature::Ed25519(signature),

View File

@@ -15,6 +15,7 @@ use crate::progress::{FullProgressTracker, ProgressUnits};
use crate::registry::context::RegistryContext; use crate::registry::context::RegistryContext;
use crate::registry::device_info::DeviceInfo; use crate::registry::device_info::DeviceInfo;
use crate::registry::package::index::{PackageIndex, PackageVersionInfo}; use crate::registry::package::index::{PackageIndex, PackageVersionInfo};
use crate::s9pk::manifest::LocaleString;
use crate::s9pk::merkle_archive::source::ArchiveSource; use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::s9pk::v2::SIG_CONTEXT; use crate::s9pk::v2::SIG_CONTEXT;
use crate::util::VersionString; use crate::util::VersionString;
@@ -38,11 +39,11 @@ impl Default for PackageDetailLevel {
} }
} }
#[derive(Debug, Deserialize, Serialize, TS)] #[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[ts(export)] #[ts(export)]
pub struct PackageInfoShort { pub struct PackageInfoShort {
pub release_notes: String, pub release_notes: LocaleString,
} }
#[derive(Debug, Deserialize, Serialize, TS, Parser, HasModel)] #[derive(Debug, Deserialize, Serialize, TS, Parser, HasModel)]
@@ -89,17 +90,20 @@ impl GetPackageResponse {
let lesser_versions: BTreeMap<_, _> = self let lesser_versions: BTreeMap<_, _> = self
.other_versions .other_versions
.as_ref() .clone()
.into_iter() .into_iter()
.flatten() .flatten()
.filter(|(v, _)| ***v < *version) .filter(|(v, _)| **v < *version)
.collect(); .collect();
if !lesser_versions.is_empty() { if !lesser_versions.is_empty() {
table.add_row(row![bc => "OLDER VERSIONS"]); table.add_row(row![bc => "OLDER VERSIONS"]);
table.add_row(row![bc => "VERSION", "RELEASE NOTES"]); table.add_row(row![bc => "VERSION", "RELEASE NOTES"]);
for (version, info) in lesser_versions { for (version, info) in lesser_versions {
table.add_row(row![AsRef::<str>::as_ref(version), &info.release_notes]); table.add_row(row![
AsRef::<str>::as_ref(&version),
&info.release_notes.localized()
]);
} }
} }
@@ -147,6 +151,7 @@ fn get_matching_models(
id, id,
source_version, source_version,
device_info, device_info,
target_version,
.. ..
}: &GetPackageParams, }: &GetPackageParams,
) -> Result<Vec<(PackageId, ExtendedVersion, Model<PackageVersionInfo>)>, Error> { ) -> Result<Vec<(PackageId, ExtendedVersion, Model<PackageVersionInfo>)>, Error> {
@@ -165,26 +170,29 @@ fn get_matching_models(
.as_entries()? .as_entries()?
.into_iter() .into_iter()
.map(|(v, info)| { .map(|(v, info)| {
let ev = ExtendedVersion::from(v);
Ok::<_, Error>( Ok::<_, Error>(
if source_version.as_ref().map_or(Ok(true), |source_version| { if target_version.as_ref().map_or(true, |tv| ev.satisfies(tv))
Ok::<_, Error>( && source_version.as_ref().map_or(Ok(true), |source_version| {
source_version.satisfies( Ok::<_, Error>(
&info source_version.satisfies(
.as_source_version() &info
.de()? .as_source_version()
.unwrap_or(VersionRange::any()), .de()?
), .unwrap_or(VersionRange::any()),
) ),
})? { )
})?
{
let mut info = info.clone(); let mut info = info.clone();
if let Some(device_info) = &device_info { if let Some(device_info) = &device_info {
if info.for_device(device_info)? { if info.for_device(device_info)? {
Some((k.clone(), ExtendedVersion::from(v), info)) Some((k.clone(), ev, info))
} else { } else {
None None
} }
} else { } else {
Some((k.clone(), ExtendedVersion::from(v), info)) Some((k.clone(), ev, info))
} }
} else { } else {
None None
@@ -207,12 +215,7 @@ pub async fn get_package(ctx: RegistryContext, params: GetPackageParams) -> Resu
for (id, version, info) in get_matching_models(&peek.as_index().as_package(), &params)? { for (id, version, info) in get_matching_models(&peek.as_index().as_package(), &params)? {
let package_best = best.entry(id.clone()).or_default(); let package_best = best.entry(id.clone()).or_default();
let package_other = other.entry(id.clone()).or_default(); let package_other = other.entry(id.clone()).or_default();
if params if package_best.keys().all(|k| !(**k > version)) {
.target_version
.as_ref()
.map_or(true, |v| version.satisfies(v))
&& package_best.keys().all(|k| !(**k > version))
{
for worse_version in package_best for worse_version in package_best
.keys() .keys()
.filter(|k| ***k < version) .filter(|k| ***k < version)
@@ -569,3 +572,42 @@ pub async fn cli_download(
Ok(()) Ok(())
} }
#[test]
fn check_matching_info_short() {
use crate::registry::package::index::PackageMetadata;
use crate::s9pk::manifest::{Alerts, Description};
use crate::util::DataUrl;
let lang_map = |s: &str| {
LocaleString::LanguageMap([("en".into(), s.into())].into_iter().collect())
};
let info = PackageVersionInfo {
metadata: PackageMetadata {
title: "Test Package".into(),
icon: DataUrl::from_vec("image/png", vec![]),
description: Description {
short: lang_map("A short description"),
long: lang_map("A longer description of the test package"),
},
release_notes: lang_map("Initial release"),
git_hash: None,
license: "MIT".into(),
wrapper_repo: "https://github.com/example/wrapper".parse().unwrap(),
upstream_repo: "https://github.com/example/upstream".parse().unwrap(),
support_site: "https://example.com/support".parse().unwrap(),
marketing_site: "https://example.com".parse().unwrap(),
donation_url: None,
docs_url: None,
alerts: Alerts::default(),
dependency_metadata: BTreeMap::new(),
os_version: exver::Version::new([0, 3, 6], []),
sdk_version: None,
hardware_acceleration: false,
},
source_version: None,
s9pks: Vec::new(),
};
from_value::<PackageInfoShort>(to_value(&info).unwrap()).unwrap();
}

View File

@@ -1,10 +1,13 @@
use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use clap::Parser; use clap::Parser;
use rpc_toolkit::{Empty, HandlerExt, ParentHandler, from_fn_async}; use rpc_toolkit::{Empty, HandlerExt, ParentHandler, from_fn_async};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::process::Command;
use ts_rs::TS; use ts_rs::TS;
use url::Url;
use crate::ImageId; use crate::ImageId;
use crate::context::CliContext; use crate::context::CliContext;
@@ -13,9 +16,9 @@ use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::v2::SIG_CONTEXT; use crate::s9pk::v2::SIG_CONTEXT;
use crate::s9pk::v2::pack::ImageConfig; use crate::s9pk::v2::pack::ImageConfig;
use crate::util::Apply;
use crate::util::io::{TmpDir, create_file, open_file}; use crate::util::io::{TmpDir, create_file, open_file};
use crate::util::serde::{HandlerExtSerde, apply_expr}; use crate::util::serde::{HandlerExtSerde, apply_expr};
use crate::util::{Apply, Invoke};
pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"]; pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"];
@@ -61,6 +64,12 @@ pub fn s9pk() -> ParentHandler<CliContext> {
.no_display() .no_display()
.with_about("about.convert-s9pk-v1-to-v2"), .with_about("about.convert-s9pk-v1-to-v2"),
) )
.subcommand(
"publish",
from_fn_async(publish)
.no_display()
.with_about("about.publish-s9pk"),
)
} }
#[derive(Deserialize, Serialize, Parser)] #[derive(Deserialize, Serialize, Parser)]
@@ -256,3 +265,61 @@ async fn convert(ctx: CliContext, S9pkPath { s9pk: s9pk_path }: S9pkPath) -> Res
tokio::fs::rename(tmp_path, s9pk_path).await?; tokio::fs::rename(tmp_path, s9pk_path).await?;
Ok(()) Ok(())
} }
async fn publish(ctx: CliContext, S9pkPath { s9pk: s9pk_path }: S9pkPath) -> Result<(), Error> {
let filename = s9pk_path.file_name().unwrap().to_string_lossy();
let s9pk = super::S9pk::open(&s9pk_path, None).await?;
let manifest = s9pk.as_manifest();
let path = [
manifest.id.deref(),
manifest.version.as_str(),
filename.deref(),
];
let mut s3url = ctx
.s9pk_s3base
.as_ref()
.ok_or_else(|| Error::new(eyre!("--s9pk-s3base required"), ErrorKind::InvalidRequest))?
.clone();
s3url
.path_segments_mut()
.map_err(|_| {
Error::new(
eyre!("s9pk-s3base is invalid (missing protocol?)"),
ErrorKind::ParseUrl,
)
})?
.pop_if_empty()
.extend(path);
let mut s3dest = format!(
"s3://{}",
ctx.s9pk_s3bucket
.as_deref()
.or_else(|| s3url
.host_str()
.and_then(|h| h.split_once(".").map(|h| h.0)))
.ok_or_else(|| {
Error::new(eyre!("--s9pk-s3bucket required"), ErrorKind::InvalidRequest)
})?,
)
.parse::<Url>()?;
s3dest
.path_segments_mut()
.map_err(|_| {
Error::new(
eyre!("s9pk-s3base is invalid (missing protocol?)"),
ErrorKind::ParseUrl,
)
})?
.pop_if_empty()
.extend(path);
Command::new("s3cmd")
.arg("put")
.arg("-P")
.arg(s9pk_path)
.arg(s3dest.as_str())
.capture(false)
.invoke(ErrorKind::Network)
.await?;
crate::registry::package::add::cli_add_package_impl(ctx, s9pk, vec![s3url], false).await
}

View File

@@ -7,6 +7,7 @@ use clap::Parser;
use futures::future::{BoxFuture, ready}; use futures::future::{BoxFuture, ready};
use futures::{FutureExt, TryStreamExt}; use futures::{FutureExt, TryStreamExt};
use imbl_value::InternedString; use imbl_value::InternedString;
use itertools::Itertools;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::process::Command; use tokio::process::Command;
use tokio::sync::OnceCell; use tokio::sync::OnceCell;
@@ -686,7 +687,7 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
let manifest = s9pk.as_manifest_mut(); let manifest = s9pk.as_manifest_mut();
manifest.git_hash = Some(GitHash::from_path(params.path()).await?); manifest.git_hash = Some(GitHash::from_path(params.path()).await?);
if !params.arch.is_empty() { if !params.arch.is_empty() {
let arches = match manifest.hardware_requirements.arch.take() { let arches: BTreeSet<InternedString> = match manifest.hardware_requirements.arch.take() {
Some(a) => params Some(a) => params
.arch .arch
.iter() .iter()
@@ -695,10 +696,41 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
.collect(), .collect(),
None => params.arch.iter().cloned().collect(), None => params.arch.iter().cloned().collect(),
}; };
manifest if arches.is_empty() {
.images return Err(Error::new(
.values_mut() eyre!(
.for_each(|c| c.arch = c.arch.intersection(&arches).cloned().collect()); "none of the requested architectures ({:?}) are supported by this package",
params.arch
),
ErrorKind::InvalidRequest,
));
}
manifest.images.iter_mut().for_each(|(id, c)| {
let filtered = c
.arch
.intersection(&arches)
.cloned()
.collect::<BTreeSet<_>>();
if filtered.is_empty() {
if let Some(arch) = &c.emulate_missing_as {
tracing::warn!(
"ImageId {} is not available for {}, emulating as {}",
id,
arches.iter().join("/"),
arch
);
c.arch = [arch.clone()].into_iter().collect();
} else {
tracing::error!(
"ImageId {} is not available for {}",
id,
arches.iter().join("/"),
);
}
} else {
c.arch = filtered;
}
});
manifest.hardware_requirements.arch = Some(arches); manifest.hardware_requirements.arch = Some(arches);
} }

View File

@@ -10,6 +10,7 @@ use crate::rpc_continuations::Guid;
use crate::service::effects::prelude::*; use crate::service::effects::prelude::*;
use crate::service::persistent_container::Subcontainer; use crate::service::persistent_container::Subcontainer;
use crate::util::Invoke; use crate::util::Invoke;
use crate::util::io::write_file_owned_atomic;
pub const NVIDIA_OVERLAY_PATH: &str = "/var/tmp/startos/nvidia-overlay"; pub const NVIDIA_OVERLAY_PATH: &str = "/var/tmp/startos/nvidia-overlay";
pub const NVIDIA_OVERLAY_DEBIAN: &str = "/var/tmp/startos/nvidia-overlay/debian"; pub const NVIDIA_OVERLAY_DEBIAN: &str = "/var/tmp/startos/nvidia-overlay/debian";
@@ -94,7 +95,7 @@ pub async fn create_subcontainer_fs(
.cloned() .cloned()
{ {
let guid = Guid::new(); let guid = Guid::new();
let rootfs_dir = context let lxc_container = context
.seed .seed
.persistent_container .persistent_container
.lxc_container .lxc_container
@@ -104,8 +105,9 @@ pub async fn create_subcontainer_fs(
eyre!("PersistentContainer has been destroyed"), eyre!("PersistentContainer has been destroyed"),
ErrorKind::Incoherent, ErrorKind::Incoherent,
) )
})? })?;
.rootfs_dir(); let container_guid = &lxc_container.guid;
let rootfs_dir = lxc_container.rootfs_dir();
let mountpoint = rootfs_dir let mountpoint = rootfs_dir
.join("media/startos/subcontainers") .join("media/startos/subcontainers")
.join(guid.as_ref()); .join(guid.as_ref());
@@ -154,6 +156,20 @@ pub async fn create_subcontainer_fs(
.arg(&mountpoint) .arg(&mountpoint)
.invoke(ErrorKind::Filesystem) .invoke(ErrorKind::Filesystem)
.await?; .await?;
write_file_owned_atomic(
mountpoint.join("etc/hostname"),
format!("{container_guid}\n"),
100000,
100000,
)
.await?;
write_file_owned_atomic(
mountpoint.join("etc/hosts"),
format!("127.0.0.1\tlocalhost\n127.0.1.1\t{container_guid}\n::1\tlocalhost ip6-localhost ip6-loopback\n"),
100000,
100000,
)
.await?;
tracing::info!("Mounted overlay {guid} for {image_id}"); tracing::info!("Mounted overlay {guid} for {image_id}");
context context
.seed .seed

View File

@@ -1,7 +1,6 @@
use std::collections::BTreeMap;
use std::ffi::{OsStr, OsString, c_int}; use std::ffi::{OsStr, OsString, c_int};
use std::fs::File; use std::fs::File;
use std::io::{IsTerminal, Read}; use std::io::{BufRead, BufReader, IsTerminal, Read};
use std::os::unix::process::{CommandExt, ExitStatusExt}; use std::os::unix::process::{CommandExt, ExitStatusExt};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::{Command as StdCommand, Stdio}; use std::process::{Command as StdCommand, Stdio};
@@ -146,95 +145,160 @@ impl ExecParams {
let mut cmd = StdCommand::new(command); let mut cmd = StdCommand::new(command);
let passwd = std::fs::read_to_string(chroot.join("etc/passwd")) let mut uid = Err(None);
.with_ctx(|_| (ErrorKind::Filesystem, "read /etc/passwd")) let mut gid = Err(None);
.log_err() let mut needs_home = true;
.unwrap_or_default();
let mut home = None;
if let Some((uid, gid)) = if let Some(user) = user {
if let Some(uid) = user.as_deref().and_then(|u| u.parse::<u32>().ok()) { if let Some((u, g)) = user.split_once(":") {
Some((uid, uid)) uid = Err(Some(u));
} else if let Some((uid, gid)) = user gid = Err(Some(g));
.as_deref()
.and_then(|u| u.split_once(":"))
.and_then(|(u, g)| Some((u.parse::<u32>().ok()?, g.parse::<u32>().ok()?)))
{
Some((uid, gid))
} else if let Some(user) = user {
Some(
if let Some((uid, gid)) = passwd.lines().find_map(|l| {
let l = l.trim();
let mut split = l.split(":");
if user != split.next()? {
return None;
}
split.next(); // throw away x
let uid = split.next()?.parse().ok()?;
let gid = split.next()?.parse().ok()?;
split.next(); // throw away group name
home = split.next();
Some((uid, gid))
// uid gid
}) {
(uid, gid)
} else if user == "root" {
(0, 0)
} else {
None.or_not_found(lazy_format!("{user} in /etc/passwd"))?
},
)
} else { } else {
None uid = Err(Some(user));
} }
{
if home.is_none() {
home = passwd.lines().find_map(|l| {
let l = l.trim();
let mut split = l.split(":");
split.next(); // throw away user name
split.next(); // throw away x
if split.next()?.parse::<u32>().ok()? != uid {
return None;
}
split.next(); // throw away gid
split.next(); // throw away group name
split.next()
})
};
std::os::unix::fs::chown("/proc/self/fd/0", Some(uid), Some(gid)).ok();
std::os::unix::fs::chown("/proc/self/fd/1", Some(uid), Some(gid)).ok();
std::os::unix::fs::chown("/proc/self/fd/2", Some(uid), Some(gid)).ok();
cmd.uid(uid);
cmd.gid(gid);
} else {
home = Some("/root");
} }
cmd.env("HOME", home.unwrap_or("/"));
let env_string = if let Some(env_file) = &env_file { if let Some(u) = uid.err().flatten().and_then(|u| u.parse::<u32>().ok()) {
std::fs::read_to_string(env_file) uid = Ok(u);
.with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("read {env:?}")))? }
} else { if let Some(g) = gid.err().flatten().and_then(|g| g.parse::<u32>().ok()) {
Default::default() gid = Ok(g);
}
let mut update_env = |line: &str| {
if let Some((k, v)) = line.split_once("=") {
needs_home &= k != "HOME";
cmd.env(k, v);
} else {
tracing::warn!("Invalid line in env: {line}");
}
}; };
let env = env_string if let Some(f) = env_file {
.lines() let mut lines = BufReader::new(
.chain(env.iter().map(|l| l.as_str())) File::open(&f).with_ctx(|_| (ErrorKind::Filesystem, format!("open r {f:?}")))?,
.map(|l| l.trim()) )
.filter_map(|l| l.split_once("=")) .lines();
.collect::<BTreeMap<_, _>>(); while let Some(line) = lines.next().transpose()? {
update_env(&line);
}
}
for line in env {
update_env(&line);
}
let needs_gid = Err(None) == gid;
let mut username = InternedString::intern("root");
let mut handle_passwd_line = |line: &str| -> Option<()> {
let l = line.trim();
let mut split = l.split(":");
let user = split.next()?;
match uid {
Err(Some(u)) if u != user => return None,
_ => (),
}
split.next(); // throw away x
let u: u32 = split.next()?.parse().ok()?;
match uid {
Err(Some(_)) => uid = Ok(u),
Err(None) if u == 0 => uid = Ok(u),
Ok(uid) if uid != u => return None,
_ => (),
}
username = user.into();
if !needs_gid && !needs_home {
return Some(());
}
let g = split.next()?;
if needs_gid {
gid = Ok(g.parse().ok()?);
}
if needs_home {
split.next(); // throw away group name
let home = split.next()?;
cmd.env("HOME", home);
}
Some(())
};
let mut lines = BufReader::new(
File::open(chroot.join("etc/passwd"))
.with_ctx(|_| (ErrorKind::Filesystem, format!("open r /etc/passwd")))?,
)
.lines();
while let Some(line) = lines.next().transpose()? {
if handle_passwd_line(&line).is_some() {
break;
}
}
let mut groups = Vec::new();
let mut handle_group_line = |line: &str| -> Option<()> {
let l = line.trim();
let mut split = l.split(":");
let name = split.next()?;
split.next()?; // throw away x
let g = split.next()?.parse::<u32>().ok()?;
match gid {
Err(Some(n)) if n == name => gid = Ok(g),
_ => (),
}
let users = split.next()?;
if users.split(",").any(|u| u == &*username) {
groups.push(nix::unistd::Gid::from_raw(g));
}
Some(())
};
let mut lines = BufReader::new(
File::open(chroot.join("etc/group"))
.with_ctx(|_| (ErrorKind::Filesystem, format!("open r /etc/group")))?,
)
.lines();
while let Some(line) = lines.next().transpose()? {
if handle_group_line(&line).is_none() {
tracing::warn!("Invalid /etc/group line: {line}");
}
}
std::os::unix::fs::chroot(chroot) std::os::unix::fs::chroot(chroot)
.with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("chroot {chroot:?}")))?; .with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("chroot {chroot:?}")))?;
cmd.args(args); if let Ok(uid) = uid {
for (k, v) in env { if uid != 0 {
cmd.env(k, v); std::os::unix::fs::chown("/proc/self/fd/0", Some(uid), gid.ok()).ok();
std::os::unix::fs::chown("/proc/self/fd/1", Some(uid), gid.ok()).ok();
std::os::unix::fs::chown("/proc/self/fd/2", Some(uid), gid.ok()).ok();
}
} }
// Handle credential changes in pre_exec to control the order:
// setgroups must happen before setgid/setuid (requires CAP_SETGID)
{
let set_uid = uid.ok();
let set_gid = gid.ok();
unsafe {
cmd.pre_exec(move || {
if !groups.is_empty() {
nix::unistd::setgroups(&groups)
.map_err(|e| std::io::Error::from_raw_os_error(e as i32))?;
}
if let Some(gid) = set_gid {
nix::unistd::setgid(nix::unistd::Gid::from_raw(gid))
.map_err(|e| std::io::Error::from_raw_os_error(e as i32))?;
}
if let Some(uid) = set_uid {
nix::unistd::setuid(nix::unistd::Uid::from_raw(uid))
.map_err(|e| std::io::Error::from_raw_os_error(e as i32))?;
}
Ok(())
});
}
}
cmd.args(args);
if let Some(workdir) = workdir { if let Some(workdir) = workdir {
cmd.current_dir(workdir); cmd.current_dir(workdir);

View File

@@ -50,6 +50,7 @@ use crate::util::io::{AsyncReadStream, AtomicFile, TermSize, delete_file};
use crate::util::net::WebSocket; use crate::util::net::WebSocket;
use crate::util::serde::Pem; use crate::util::serde::Pem;
use crate::util::sync::SyncMutex; use crate::util::sync::SyncMutex;
use crate::util::tui::choose;
use crate::volume::data_dir; use crate::volume::data_dir;
use crate::{ActionId, CAP_1_KiB, DATA_DIR, HostId, ImageId, PackageId}; use crate::{ActionId, CAP_1_KiB, DATA_DIR, HostId, ImageId, PackageId};
@@ -709,6 +710,19 @@ pub async fn rebuild(ctx: RpcContext, RebuildParams { id }: RebuildParams) -> Re
Ok(()) Ok(())
} }
#[derive(Debug, Deserialize, Serialize)]
pub struct SubcontainerInfo {
pub id: Guid,
pub name: InternedString,
pub image_id: ImageId,
}
impl std::fmt::Display for SubcontainerInfo {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let SubcontainerInfo { id, name, image_id } = self;
write!(f, "{id} => Name: {name}; Image: {image_id}")
}
}
#[derive(Deserialize, Serialize, TS)] #[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct AttachParams { pub struct AttachParams {
@@ -722,7 +736,7 @@ pub struct AttachParams {
#[serde(rename = "__Auth_session")] #[serde(rename = "__Auth_session")]
session: Option<InternedString>, session: Option<InternedString>,
#[ts(type = "string | null")] #[ts(type = "string | null")]
subcontainer: Option<InternedString>, subcontainer: Option<Guid>,
#[ts(type = "string | null")] #[ts(type = "string | null")]
name: Option<InternedString>, name: Option<InternedString>,
#[ts(type = "string | null")] #[ts(type = "string | null")]
@@ -745,7 +759,7 @@ pub async fn attach(
user, user,
}: AttachParams, }: AttachParams,
) -> Result<Guid, Error> { ) -> Result<Guid, Error> {
let (container_id, subcontainer_id, image_id, workdir, root_command) = { let (container_id, subcontainer_id, image_id, user, workdir, root_command) = {
let id = &id; let id = &id;
let service = ctx.services.get(id).await; let service = ctx.services.get(id).await;
@@ -786,13 +800,6 @@ pub async fn attach(
} }
}) })
.collect(); .collect();
let format_subcontainer_pair = |(guid, wrapper): (&Guid, &Subcontainer)| {
format!(
"{guid} imageId: {image_id} name: \"{name}\"",
name = &wrapper.name,
image_id = &wrapper.image_id
)
};
let Some((subcontainer_id, image_id)) = subcontainer_ids let Some((subcontainer_id, image_id)) = subcontainer_ids
.first() .first()
.map::<(Guid, ImageId), _>(|&x| (x.0.clone(), x.1.image_id.clone())) .map::<(Guid, ImageId), _>(|&x| (x.0.clone(), x.1.image_id.clone()))
@@ -803,19 +810,17 @@ pub async fn attach(
.lock() .lock()
.await .await
.iter() .iter()
.map(format_subcontainer_pair) .map(|(g, s)| SubcontainerInfo {
.join("\n"); id: g.clone(),
name: s.name.clone(),
image_id: s.image_id.clone(),
})
.collect::<Vec<_>>();
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("service.mod.no-matching-subcontainers", id = id)),
"{}",
t!(
"service.mod.no-matching-subcontainers",
id = id,
subcontainers = subcontainers
)
),
ErrorKind::NotFound, ErrorKind::NotFound,
)); )
.with_info(to_value(&subcontainers)?));
}; };
let passwd = root_dir let passwd = root_dir
@@ -835,38 +840,39 @@ pub async fn attach(
) )
.with_kind(ErrorKind::Deserialization)?; .with_kind(ErrorKind::Deserialization)?;
let root_command = get_passwd_command( let user = user
passwd, .clone()
user.as_deref() .or_else(|| image_meta["user"].as_str().map(InternedString::intern))
.or_else(|| image_meta["user"].as_str()) .unwrap_or_else(|| InternedString::intern("root"));
.unwrap_or("root"),
) let root_command = get_passwd_command(passwd, &*user).await;
.await;
let workdir = image_meta["workdir"].as_str().map(|s| s.to_owned()); let workdir = image_meta["workdir"].as_str().map(|s| s.to_owned());
if subcontainer_ids.len() > 1 { if subcontainer_ids.len() > 1 {
let subcontainer_ids = subcontainer_ids let subcontainers = subcontainer_ids
.into_iter() .into_iter()
.map(format_subcontainer_pair) .map(|(g, s)| SubcontainerInfo {
.join("\n"); id: g.clone(),
name: s.name.clone(),
image_id: s.image_id.clone(),
})
.collect::<Vec<_>>();
return Err(Error::new( return Err(Error::new(
eyre!( eyre!(
"{}", "{}",
t!( t!("service.mod.multiple-subcontainers-found", id = id,)
"service.mod.multiple-subcontainers-found",
id = id,
subcontainer_ids = subcontainer_ids
)
), ),
ErrorKind::InvalidRequest, ErrorKind::InvalidRequest,
)); )
.with_info(to_value(&subcontainers)?));
} }
( (
service_ref.container_id()?, service_ref.container_id()?,
subcontainer_id, subcontainer_id,
image_id, image_id,
user.into(),
workdir, workdir,
root_command, root_command,
) )
@@ -883,7 +889,7 @@ pub async fn attach(
pty_size: Option<TermSize>, pty_size: Option<TermSize>,
image_id: ImageId, image_id: ImageId,
workdir: Option<String>, workdir: Option<String>,
user: Option<InternedString>, user: InternedString,
root_command: &RootCommand, root_command: &RootCommand,
) -> Result<(), Error> { ) -> Result<(), Error> {
use axum::extract::ws::Message; use axum::extract::ws::Message;
@@ -904,11 +910,9 @@ pub async fn attach(
Path::new("/media/startos/images") Path::new("/media/startos/images")
.join(image_id) .join(image_id)
.with_extension("env"), .with_extension("env"),
); )
.arg("--user")
if let Some(user) = user { .arg(&*user);
cmd.arg("--user").arg(&*user);
}
if let Some(workdir) = workdir { if let Some(workdir) = workdir {
cmd.arg("--workdir").arg(workdir); cmd.arg("--workdir").arg(workdir);
@@ -1091,45 +1095,6 @@ pub async fn attach(
Ok(guid) Ok(guid)
} }
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListSubcontainersParams {
pub id: PackageId,
}
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct SubcontainerInfo {
pub name: InternedString,
pub image_id: ImageId,
}
pub async fn list_subcontainers(
ctx: RpcContext,
ListSubcontainersParams { id }: ListSubcontainersParams,
) -> Result<BTreeMap<Guid, SubcontainerInfo>, Error> {
let service = ctx.services.get(&id).await;
let service_ref = service.as_ref().or_not_found(&id)?;
let container = &service_ref.seed.persistent_container;
let subcontainers = container.subcontainers.lock().await;
let result: BTreeMap<Guid, SubcontainerInfo> = subcontainers
.iter()
.map(|(guid, subcontainer)| {
(
guid.clone(),
SubcontainerInfo {
name: subcontainer.name.clone(),
image_id: subcontainer.image_id.clone(),
},
)
})
.collect();
Ok(result)
}
async fn get_passwd_command(etc_passwd_path: PathBuf, user: &str) -> RootCommand { async fn get_passwd_command(etc_passwd_path: PathBuf, user: &str) -> RootCommand {
async { async {
let mut file = tokio::fs::File::open(etc_passwd_path).await?; let mut file = tokio::fs::File::open(etc_passwd_path).await?;
@@ -1210,23 +1175,34 @@ pub async fn cli_attach(
None None
}; };
let method = parent_method.into_iter().chain(method).join(".");
let mut params = json!({
"id": params.id,
"command": params.command,
"tty": tty,
"stderrTty": stderr.is_terminal(),
"ptySize": if tty { TermSize::get_current() } else { None },
"subcontainer": params.subcontainer,
"imageId": params.image_id,
"name": params.name,
"user": params.user,
});
let guid: Guid = from_value( let guid: Guid = from_value(
context match context
.call_remote::<RpcContext>( .call_remote::<RpcContext>(&method, params.clone())
&parent_method.into_iter().chain(method).join("."), .await
json!({ {
"id": params.id, Ok(a) => a,
"command": params.command, Err(e) => {
"tty": tty, let prompt = e.to_string();
"stderrTty": stderr.is_terminal(), let options: Vec<SubcontainerInfo> = from_value(e.info)?;
"ptySize": if tty { TermSize::get_current() } else { None }, let choice = choose(&prompt, &options).await?;
"subcontainer": params.subcontainer, params["subcontainer"] = to_value(&choice.id)?;
"imageId": params.image_id, context
"name": params.name, .call_remote::<RpcContext>(&method, params.clone())
"user": params.user, .await?
}), }
) },
.await?,
)?; )?;
let mut ws = context.ws_continuation(guid).await?; let mut ws = context.ws_continuation(guid).await?;

View File

@@ -524,26 +524,26 @@ pub async fn init_web(ctx: CliContext) -> Result<(), Error> {
"To access your Web URL securely, trust your Root CA (displayed above) on your client device(s):\n", "To access your Web URL securely, trust your Root CA (displayed above) on your client device(s):\n",
" - MacOS\n", " - MacOS\n",
" 1. Open the Terminal app\n", " 1. Open the Terminal app\n",
" 2. Paste the following command (**DO NOTt** click Return): pbcopy < ~/Desktop/ca.crt\n", " 2. Paste the following command (**DO NOT** click Return): pbcopy < ~/Desktop/ca.crt\n",
" 3. Copy your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n", " 3. Copy your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n",
" 4. Back in Terminal, click Return. ca.crt is saved to your Desktop\n", " 4. Back in Terminal, click Return. ca.crt is saved to your Desktop\n",
" 5. Complete by trusting your Root CA: https://https://staging.docs.start9.com/device-guides/mac/ca.html\n", " 5. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/mac/ca.html\n",
" - Linux\n", " - Linux\n",
" 1. Open gedit, nano, or any editor\n", " 1. Open gedit, nano, or any editor\n",
" 2. Copy/paste your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n", " 2. Copy/paste your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n",
" 3. Name the file ca.crt and save as plaintext\n", " 3. Name the file ca.crt and save as plaintext\n",
" 5. Complete by trusting your Root CA: https://https://staging.docs.start9.com/device-guides/linux/ca.html\n", " 5. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/linux/ca.html\n",
" - Windows\n", " - Windows\n",
" 1. Open the Notepad app\n", " 1. Open the Notepad app\n",
" 2. Copy/paste your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n", " 2. Copy/paste your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n",
" 3. Name the file ca.crt and save as plaintext\n", " 3. Name the file ca.crt and save as plaintext\n",
" 5. Complete by trusting your Root CA: https://https://staging.docs.start9.com/device-guides/windows/ca.html\n", " 5. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/windows/ca.html\n",
" - Android/Graphene\n", " - Android/Graphene\n",
" 1. Send the ca.crt file (created above) to yourself\n", " 1. Send the ca.crt file (created above) to yourself\n",
" 2. Complete by trusting your Root CA: https://https://staging.docs.start9.com/device-guides/android/ca.html\n", " 2. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/android/ca.html\n",
" - iOS\n", " - iOS\n",
" 1. Send the ca.crt file (created above) to yourself\n", " 1. Send the ca.crt file (created above) to yourself\n",
" 2. Complete by trusting your Root CA: https://https://staging.docs.start9.com/device-guides/ios/ca.html\n", " 2. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/ios/ca.html\n",
)); ));
return Ok(()); return Ok(());

View File

@@ -6,6 +6,7 @@ use clap::{ArgAction, Parser};
use color_eyre::eyre::{Result, eyre}; use color_eyre::eyre::{Result, eyre};
use exver::{Version, VersionRange}; use exver::{Version, VersionRange};
use futures::TryStreamExt; use futures::TryStreamExt;
use imbl::OrdMap;
use imbl_value::json; use imbl_value::json;
use itertools::Itertools; use itertools::Itertools;
use patch_db::json_ptr::JsonPointer; use patch_db::json_ptr::JsonPointer;
@@ -245,6 +246,7 @@ async fn maybe_do_update(
let mut available = from_value::<BTreeMap<Version, OsVersionInfo>>( let mut available = from_value::<BTreeMap<Version, OsVersionInfo>>(
ctx.call_remote_with::<RegistryContext, _>( ctx.call_remote_with::<RegistryContext, _>(
"os.version.get", "os.version.get",
OrdMap::new(),
json!({ json!({
"source": current_version, "source": current_version,
"target": target, "target": target,

View File

@@ -248,7 +248,7 @@ impl<'a> Invoke<'a> for ExtendedCommand<'a> {
.or(Some(&res.stdout)) .or(Some(&res.stdout))
.filter(|a| !a.is_empty()) .filter(|a| !a.is_empty())
.and_then(|a| std::str::from_utf8(a).ok()) .and_then(|a| std::str::from_utf8(a).ok())
.unwrap_or(&format!("{} exited with code {}", cmd_str, res.status)) .unwrap_or(&format!("{} exited with {}", cmd_str, res.status))
); );
Ok(res.stdout) Ok(res.stdout)
} else { } else {
@@ -309,7 +309,7 @@ impl<'a> Invoke<'a> for ExtendedCommand<'a> {
.filter(|a| !a.is_empty()) .filter(|a| !a.is_empty())
.and_then(|a| std::str::from_utf8(a).ok()) .and_then(|a| std::str::from_utf8(a).ok())
.unwrap_or(&format!( .unwrap_or(&format!(
"{} exited with code {}", "{} exited with {}",
cmd.as_std().get_program().to_string_lossy(), cmd.as_std().get_program().to_string_lossy(),
res.status res.status
)) ))

View File

@@ -58,8 +58,9 @@ mod v0_4_0_alpha_15;
mod v0_4_0_alpha_16; mod v0_4_0_alpha_16;
mod v0_4_0_alpha_17; mod v0_4_0_alpha_17;
mod v0_4_0_alpha_18; mod v0_4_0_alpha_18;
mod v0_4_0_alpha_19;
pub type Current = v0_4_0_alpha_18::Version; // VERSION_BUMP pub type Current = v0_4_0_alpha_19::Version; // VERSION_BUMP
impl Current { impl Current {
#[instrument(skip(self, db))] #[instrument(skip(self, db))]
@@ -179,7 +180,8 @@ enum Version {
V0_4_0_alpha_15(Wrapper<v0_4_0_alpha_15::Version>), V0_4_0_alpha_15(Wrapper<v0_4_0_alpha_15::Version>),
V0_4_0_alpha_16(Wrapper<v0_4_0_alpha_16::Version>), V0_4_0_alpha_16(Wrapper<v0_4_0_alpha_16::Version>),
V0_4_0_alpha_17(Wrapper<v0_4_0_alpha_17::Version>), V0_4_0_alpha_17(Wrapper<v0_4_0_alpha_17::Version>),
V0_4_0_alpha_18(Wrapper<v0_4_0_alpha_18::Version>), // VERSION_BUMP V0_4_0_alpha_18(Wrapper<v0_4_0_alpha_18::Version>),
V0_4_0_alpha_19(Wrapper<v0_4_0_alpha_19::Version>), // VERSION_BUMP
Other(exver::Version), Other(exver::Version),
} }
@@ -240,7 +242,8 @@ impl Version {
Self::V0_4_0_alpha_15(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_15(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_16(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_16(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_17(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_17(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_18(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP Self::V0_4_0_alpha_18(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_19(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP
Self::Other(v) => { Self::Other(v) => {
return Err(Error::new( return Err(Error::new(
eyre!("unknown version {v}"), eyre!("unknown version {v}"),
@@ -293,7 +296,8 @@ impl Version {
Version::V0_4_0_alpha_15(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_15(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_16(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_16(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_17(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_17(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_18(Wrapper(x)) => x.semver(), // VERSION_BUMP Version::V0_4_0_alpha_18(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_19(Wrapper(x)) => x.semver(), // VERSION_BUMP
Version::Other(x) => x.clone(), Version::Other(x) => x.clone(),
} }
} }

View File

@@ -0,0 +1,37 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{VersionT, v0_4_0_alpha_18};
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_4_0_alpha_19: exver::Version = exver::Version::new(
[0, 4, 0],
[PreReleaseSegment::String("alpha".into()), 19.into()]
);
}
#[derive(Clone, Copy, Debug, Default)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_4_0_alpha_18::Version;
type PreUpRes = ();
async fn pre_up(self) -> Result<Self::PreUpRes, Error> {
Ok(())
}
fn semver(self) -> exver::Version {
V0_4_0_alpha_19.clone()
}
fn compat(self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
#[instrument(skip_all)]
fn up(self, _db: &mut Value, _: Self::PreUpRes) -> Result<Value, Error> {
Ok(Value::Null)
}
fn down(self, _db: &mut Value) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -16,14 +16,14 @@ import {
MountParams, MountParams,
StatusInfo, StatusInfo,
Manifest, Manifest,
} from "./osBindings" } from './osBindings'
import { import {
PackageId, PackageId,
Dependencies, Dependencies,
ServiceInterfaceId, ServiceInterfaceId,
SmtpValue, SmtpValue,
ActionResult, ActionResult,
} from "./types" } from './types'
/** Used to reach out from the pure js runtime */ /** Used to reach out from the pure js runtime */
@@ -155,13 +155,13 @@ export type Effects = {
/** Returns a PEM encoded fullchain for the hostnames specified */ /** Returns a PEM encoded fullchain for the hostnames specified */
getSslCertificate: (options: { getSslCertificate: (options: {
hostnames: string[] hostnames: string[]
algorithm?: "ecdsa" | "ed25519" algorithm?: 'ecdsa' | 'ed25519'
callback?: () => void callback?: () => void
}) => Promise<[string, string, string]> }) => Promise<[string, string, string]>
/** Returns a PEM encoded private key corresponding to the certificate for the hostnames specified */ /** Returns a PEM encoded private key corresponding to the certificate for the hostnames specified */
getSslKey: (options: { getSslKey: (options: {
hostnames: string[] hostnames: string[]
algorithm?: "ecdsa" | "ed25519" algorithm?: 'ecdsa' | 'ed25519'
}) => Promise<string> }) => Promise<string>
/** sets the version that this service's data has been migrated to */ /** sets the version that this service's data has been migrated to */

View File

@@ -1,7 +1,7 @@
import * as T from "../types" import * as T from '../types'
import * as IST from "../actions/input/inputSpecTypes" import * as IST from '../actions/input/inputSpecTypes'
import { Action, ActionInfo } from "./setupActions" import { Action, ActionInfo } from './setupActions'
import { ExtractInputSpecType } from "./input/builder/inputSpec" import { ExtractInputSpecType } from './input/builder/inputSpec'
export type RunActionInput<Input> = export type RunActionInput<Input> =
| Input | Input
@@ -53,17 +53,17 @@ type TaskBase = {
replayId?: string replayId?: string
} }
type TaskInput<T extends ActionInfo<T.ActionId, any>> = { type TaskInput<T extends ActionInfo<T.ActionId, any>> = {
kind: "partial" kind: 'partial'
value: T.DeepPartial<GetActionInputType<T>> value: T.DeepPartial<GetActionInputType<T>>
} }
export type TaskOptions<T extends ActionInfo<T.ActionId, any>> = TaskBase & export type TaskOptions<T extends ActionInfo<T.ActionId, any>> = TaskBase &
( (
| { | {
when?: Exclude<T.TaskTrigger, { condition: "input-not-matches" }> when?: Exclude<T.TaskTrigger, { condition: 'input-not-matches' }>
input?: TaskInput<T> input?: TaskInput<T>
} }
| { | {
when: T.TaskTrigger & { condition: "input-not-matches" } when: T.TaskTrigger & { condition: 'input-not-matches' }
input: TaskInput<T> input: TaskInput<T>
} }
) )

View File

@@ -1,6 +1,6 @@
import { InputSpec } from "./inputSpec" import { InputSpec } from './inputSpec'
import { List } from "./list" import { List } from './list'
import { Value } from "./value" import { Value } from './value'
import { Variants } from "./variants" import { Variants } from './variants'
export { InputSpec as InputSpec, List, Value, Variants } export { InputSpec as InputSpec, List, Value, Variants }

View File

@@ -1,9 +1,9 @@
import { ValueSpec } from "../inputSpecTypes" import { ValueSpec } from '../inputSpecTypes'
import { Value } from "./value" import { Value } from './value'
import { _ } from "../../../util" import { _ } from '../../../util'
import { Effects } from "../../../Effects" import { Effects } from '../../../Effects'
import { Parser, object } from "ts-matches" import { Parser, object } from 'ts-matches'
import { DeepPartial } from "../../../types" import { DeepPartial } from '../../../types'
export type LazyBuildOptions = { export type LazyBuildOptions = {
effects: Effects effects: Effects

View File

@@ -1,4 +1,4 @@
import { InputSpec, LazyBuild } from "./inputSpec" import { InputSpec, LazyBuild } from './inputSpec'
import { import {
ListValueSpecText, ListValueSpecText,
Pattern, Pattern,
@@ -6,8 +6,8 @@ import {
UniqueBy, UniqueBy,
ValueSpecList, ValueSpecList,
ValueSpecListOf, ValueSpecListOf,
} from "../inputSpecTypes" } from '../inputSpecTypes'
import { Parser, arrayOf, string } from "ts-matches" import { Parser, arrayOf, string } from 'ts-matches'
export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> { export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
private constructor( private constructor(
@@ -55,7 +55,7 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
* @description Informs the browser how to behave and which keyboard to display on mobile * @description Informs the browser how to behave and which keyboard to display on mobile
* @default "text" * @default "text"
*/ */
inputmode?: ListValueSpecText["inputmode"] inputmode?: ListValueSpecText['inputmode']
/** /**
* @description Displays a button that will generate a random string according to the provided charset and len attributes. * @description Displays a button that will generate a random string according to the provided charset and len attributes.
*/ */
@@ -65,21 +65,21 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const validator = arrayOf(string) const validator = arrayOf(string)
return new List<string[]>(() => { return new List<string[]>(() => {
const spec = { const spec = {
type: "text" as const, type: 'text' as const,
placeholder: null, placeholder: null,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
masked: false, masked: false,
inputmode: "text" as const, inputmode: 'text' as const,
generate: null, generate: null,
patterns: aSpec.patterns || [], patterns: aSpec.patterns || [],
...aSpec, ...aSpec,
} }
const built: ValueSpecListOf<"text"> = { const built: ValueSpecListOf<'text'> = {
description: null, description: null,
warning: null, warning: null,
default: [], default: [],
type: "list" as const, type: 'list' as const,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
disabled: false, disabled: false,
@@ -106,7 +106,7 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minLength?: number | null minLength?: number | null
maxLength?: number | null maxLength?: number | null
patterns?: Pattern[] patterns?: Pattern[]
inputmode?: ListValueSpecText["inputmode"] inputmode?: ListValueSpecText['inputmode']
} }
}>, }>,
) { ) {
@@ -114,21 +114,21 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new List<string[]>(async (options) => { return new List<string[]>(async (options) => {
const { spec: aSpec, ...a } = await getA(options) const { spec: aSpec, ...a } = await getA(options)
const spec = { const spec = {
type: "text" as const, type: 'text' as const,
placeholder: null, placeholder: null,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
masked: false, masked: false,
inputmode: "text" as const, inputmode: 'text' as const,
generate: null, generate: null,
patterns: aSpec.patterns || [], patterns: aSpec.patterns || [],
...aSpec, ...aSpec,
} }
const built: ValueSpecListOf<"text"> = { const built: ValueSpecListOf<'text'> = {
description: null, description: null,
warning: null, warning: null,
default: [], default: [],
type: "list" as const, type: 'list' as const,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
disabled: false, disabled: false,
@@ -162,7 +162,7 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const { spec: previousSpecSpec, ...restSpec } = aSpec const { spec: previousSpecSpec, ...restSpec } = aSpec
const built = await previousSpecSpec.build(options) const built = await previousSpecSpec.build(options)
const spec = { const spec = {
type: "object" as const, type: 'object' as const,
displayAs: null, displayAs: null,
uniqueBy: null, uniqueBy: null,
...restSpec, ...restSpec,
@@ -179,7 +179,7 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
warning: null, warning: null,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
type: "list" as const, type: 'list' as const,
disabled: false, disabled: false,
...value, ...value,
}, },

View File

@@ -1,6 +1,6 @@
import { InputSpec, LazyBuild } from "./inputSpec" import { InputSpec, LazyBuild } from './inputSpec'
import { List } from "./list" import { List } from './list'
import { UnionRes, UnionResStaticValidatedAs, Variants } from "./variants" import { UnionRes, UnionResStaticValidatedAs, Variants } from './variants'
import { import {
Pattern, Pattern,
RandomString, RandomString,
@@ -9,9 +9,9 @@ import {
ValueSpecHidden, ValueSpecHidden,
ValueSpecText, ValueSpecText,
ValueSpecTextarea, ValueSpecTextarea,
} from "../inputSpecTypes" } from '../inputSpecTypes'
import { DefaultString } from "../inputSpecTypes" import { DefaultString } from '../inputSpecTypes'
import { _, once } from "../../../util" import { _, once } from '../../../util'
import { import {
Parser, Parser,
any, any,
@@ -23,8 +23,8 @@ import {
number, number,
object, object,
string, string,
} from "ts-matches" } from 'ts-matches'
import { DeepPartial } from "../../../types" import { DeepPartial } from '../../../types'
export const fileInfoParser = object({ export const fileInfoParser = object({
path: string, path: string,
@@ -42,7 +42,7 @@ const testForAsRequiredParser = once(
function asRequiredParser<Type, Input extends { required: boolean }>( function asRequiredParser<Type, Input extends { required: boolean }>(
parser: Parser<unknown, Type>, parser: Parser<unknown, Type>,
input: Input, input: Input,
): Parser<unknown, AsRequired<Type, Input["required"]>> { ): Parser<unknown, AsRequired<Type, Input['required']>> {
if (testForAsRequiredParser()(input)) return parser as any if (testForAsRequiredParser()(input)) return parser as any
return parser.nullable() as any return parser.nullable() as any
} }
@@ -92,7 +92,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
spec: { spec: {
description: null, description: null,
warning: null, warning: null,
type: "toggle" as const, type: 'toggle' as const,
disabled: false, disabled: false,
immutable: a.immutable ?? false, immutable: a.immutable ?? false,
...a, ...a,
@@ -117,7 +117,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
spec: { spec: {
description: null, description: null,
warning: null, warning: null,
type: "toggle" as const, type: 'toggle' as const,
disabled: false, disabled: false,
immutable: false, immutable: false,
...(await a(options)), ...(await a(options)),
@@ -191,7 +191,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
* @description Informs the browser how to behave and which keyboard to display on mobile * @description Informs the browser how to behave and which keyboard to display on mobile
* @default "text" * @default "text"
*/ */
inputmode?: ValueSpecText["inputmode"] inputmode?: ValueSpecText['inputmode']
/** /**
* @description Once set, the value can never be changed. * @description Once set, the value can never be changed.
* @default false * @default false
@@ -206,7 +206,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<string, Required>>( return new Value<AsRequired<string, Required>>(
async () => ({ async () => ({
spec: { spec: {
type: "text" as const, type: 'text' as const,
description: null, description: null,
warning: null, warning: null,
masked: false, masked: false,
@@ -214,7 +214,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minLength: null, minLength: null,
maxLength: null, maxLength: null,
patterns: [], patterns: [],
inputmode: "text", inputmode: 'text',
disabled: false, disabled: false,
immutable: a.immutable ?? false, immutable: a.immutable ?? false,
generate: a.generate ?? null, generate: a.generate ?? null,
@@ -237,7 +237,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minLength?: number | null minLength?: number | null
maxLength?: number | null maxLength?: number | null
patterns?: Pattern[] patterns?: Pattern[]
inputmode?: ValueSpecText["inputmode"] inputmode?: ValueSpecText['inputmode']
disabled?: string | false disabled?: string | false
generate?: null | RandomString generate?: null | RandomString
}>, }>,
@@ -247,7 +247,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: "text" as const, type: 'text' as const,
description: null, description: null,
warning: null, warning: null,
masked: false, masked: false,
@@ -255,7 +255,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minLength: null, minLength: null,
maxLength: null, maxLength: null,
patterns: [], patterns: [],
inputmode: "text", inputmode: 'text',
disabled: false, disabled: false,
immutable: false, immutable: false,
generate: a.generate ?? null, generate: a.generate ?? null,
@@ -334,7 +334,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minRows: 3, minRows: 3,
maxRows: 6, maxRows: 6,
placeholder: null, placeholder: null,
type: "textarea" as const, type: 'textarea' as const,
disabled: false, disabled: false,
immutable: a.immutable ?? false, immutable: a.immutable ?? false,
...a, ...a,
@@ -371,7 +371,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minRows: 3, minRows: 3,
maxRows: 6, maxRows: 6,
placeholder: null, placeholder: null,
type: "textarea" as const, type: 'textarea' as const,
disabled: false, disabled: false,
immutable: false, immutable: false,
...a, ...a,
@@ -444,7 +444,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<number, Required>>( return new Value<AsRequired<number, Required>>(
() => ({ () => ({
spec: { spec: {
type: "number" as const, type: 'number' as const,
description: null, description: null,
warning: null, warning: null,
min: null, min: null,
@@ -482,7 +482,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: "number" as const, type: 'number' as const,
description: null, description: null,
warning: null, warning: null,
min: null, min: null,
@@ -540,7 +540,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<string, Required>>( return new Value<AsRequired<string, Required>>(
() => ({ () => ({
spec: { spec: {
type: "color" as const, type: 'color' as const,
description: null, description: null,
warning: null, warning: null,
disabled: false, disabled: false,
@@ -568,7 +568,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: "color" as const, type: 'color' as const,
description: null, description: null,
warning: null, warning: null,
disabled: false, disabled: false,
@@ -618,7 +618,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
* @description Informs the browser how to behave and which date/time component to display. * @description Informs the browser how to behave and which date/time component to display.
* @default "datetime-local" * @default "datetime-local"
*/ */
inputmode?: ValueSpecDatetime["inputmode"] inputmode?: ValueSpecDatetime['inputmode']
min?: string | null min?: string | null
max?: string | null max?: string | null
/** /**
@@ -631,10 +631,10 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<string, Required>>( return new Value<AsRequired<string, Required>>(
() => ({ () => ({
spec: { spec: {
type: "datetime" as const, type: 'datetime' as const,
description: null, description: null,
warning: null, warning: null,
inputmode: "datetime-local", inputmode: 'datetime-local',
min: null, min: null,
max: null, max: null,
step: null, step: null,
@@ -654,7 +654,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
warning?: string | null warning?: string | null
default: string | null default: string | null
required: Required required: Required
inputmode?: ValueSpecDatetime["inputmode"] inputmode?: ValueSpecDatetime['inputmode']
min?: string | null min?: string | null
max?: string | null max?: string | null
disabled?: false | string disabled?: false | string
@@ -665,10 +665,10 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: "datetime" as const, type: 'datetime' as const,
description: null, description: null,
warning: null, warning: null,
inputmode: "datetime-local", inputmode: 'datetime-local',
min: null, min: null,
max: null, max: null,
disabled: false, disabled: false,
@@ -740,7 +740,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
spec: { spec: {
description: null, description: null,
warning: null, warning: null,
type: "select" as const, type: 'select' as const,
disabled: false, disabled: false,
immutable: a.immutable ?? false, immutable: a.immutable ?? false,
...a, ...a,
@@ -766,7 +766,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
spec: { spec: {
description: null, description: null,
warning: null, warning: null,
type: "select" as const, type: 'select' as const,
disabled: false, disabled: false,
immutable: false, immutable: false,
...a, ...a,
@@ -837,7 +837,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<(keyof Values & string)[]>( return new Value<(keyof Values & string)[]>(
() => ({ () => ({
spec: { spec: {
type: "multiselect" as const, type: 'multiselect' as const,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
warning: null, warning: null,
@@ -867,7 +867,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: "multiselect" as const, type: 'multiselect' as const,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
warning: null, warning: null,
@@ -915,7 +915,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const built = await spec.build(options as any) const built = await spec.build(options as any)
return { return {
spec: { spec: {
type: "object" as const, type: 'object' as const,
description: null, description: null,
warning: null, warning: null,
...a, ...a,
@@ -933,7 +933,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
required: Required required: Required
}) { }) {
const buildValue = { const buildValue = {
type: "file" as const, type: 'file' as const,
description: null, description: null,
warning: null, warning: null,
...a, ...a,
@@ -960,7 +960,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<FileInfo, Required>, FileInfo | null>( return new Value<AsRequired<FileInfo, Required>, FileInfo | null>(
async (options) => { async (options) => {
const spec = { const spec = {
type: "file" as const, type: 'file' as const,
description: null, description: null,
warning: null, warning: null,
...(await a(options)), ...(await a(options)),
@@ -1034,7 +1034,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const built = await a.variants.build(options as any) const built = await a.variants.build(options as any)
return { return {
spec: { spec: {
type: "union" as const, type: 'union' as const,
description: null, description: null,
warning: null, warning: null,
disabled: false, disabled: false,
@@ -1109,7 +1109,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const built = await newValues.variants.build(options as any) const built = await newValues.variants.build(options as any)
return { return {
spec: { spec: {
type: "union" as const, type: 'union' as const,
description: null, description: null,
warning: null, warning: null,
...newValues, ...newValues,
@@ -1202,7 +1202,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<T, typeof parser._TYPE>(async () => { return new Value<T, typeof parser._TYPE>(async () => {
return { return {
spec: { spec: {
type: "hidden" as const, type: 'hidden' as const,
} as ValueSpecHidden, } as ValueSpecHidden,
validator: parser, validator: parser,
} }
@@ -1221,7 +1221,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const validator = await getParser(options) const validator = await getParser(options)
return { return {
spec: { spec: {
type: "hidden" as const, type: 'hidden' as const,
} as ValueSpecHidden, } as ValueSpecHidden,
validator, validator,
} }

View File

@@ -1,12 +1,12 @@
import { DeepPartial } from "../../../types" import { DeepPartial } from '../../../types'
import { ValueSpec, ValueSpecUnion } from "../inputSpecTypes" import { ValueSpec, ValueSpecUnion } from '../inputSpecTypes'
import { import {
LazyBuild, LazyBuild,
InputSpec, InputSpec,
ExtractInputSpecType, ExtractInputSpecType,
ExtractInputSpecStaticValidatedAs, ExtractInputSpecStaticValidatedAs,
} from "./inputSpec" } from './inputSpec'
import { Parser, any, anyOf, literal, object } from "ts-matches" import { Parser, any, anyOf, literal, object } from 'ts-matches'
export type UnionRes< export type UnionRes<
VariantValues extends { VariantValues extends {
@@ -19,10 +19,10 @@ export type UnionRes<
> = { > = {
[key in keyof VariantValues]: { [key in keyof VariantValues]: {
selection: key selection: key
value: ExtractInputSpecType<VariantValues[key]["spec"]> value: ExtractInputSpecType<VariantValues[key]['spec']>
other?: { other?: {
[key2 in Exclude<keyof VariantValues & string, key>]?: DeepPartial< [key2 in Exclude<keyof VariantValues & string, key>]?: DeepPartial<
ExtractInputSpecType<VariantValues[key2]["spec"]> ExtractInputSpecType<VariantValues[key2]['spec']>
> >
} }
} }
@@ -39,10 +39,10 @@ export type UnionResStaticValidatedAs<
> = { > = {
[key in keyof VariantValues]: { [key in keyof VariantValues]: {
selection: key selection: key
value: ExtractInputSpecStaticValidatedAs<VariantValues[key]["spec"]> value: ExtractInputSpecStaticValidatedAs<VariantValues[key]['spec']>
other?: { other?: {
[key2 in Exclude<keyof VariantValues & string, key>]?: DeepPartial< [key2 in Exclude<keyof VariantValues & string, key>]?: DeepPartial<
ExtractInputSpecStaticValidatedAs<VariantValues[key2]["spec"]> ExtractInputSpecStaticValidatedAs<VariantValues[key2]['spec']>
> >
} }
} }
@@ -106,7 +106,7 @@ export class Variants<
> { > {
private constructor( private constructor(
public build: LazyBuild<{ public build: LazyBuild<{
spec: ValueSpecUnion["variants"] spec: ValueSpecUnion['variants']
validator: Parser<unknown, UnionRes<VariantValues>> validator: Parser<unknown, UnionRes<VariantValues>>
}>, }>,
public readonly validator: Parser< public readonly validator: Parser<
@@ -126,7 +126,7 @@ export class Variants<
const staticValidators = {} as { const staticValidators = {} as {
[K in keyof VariantValues]: Parser< [K in keyof VariantValues]: Parser<
unknown, unknown,
ExtractInputSpecStaticValidatedAs<VariantValues[K]["spec"]> ExtractInputSpecStaticValidatedAs<VariantValues[K]['spec']>
> >
} }
for (const key in a) { for (const key in a) {
@@ -143,7 +143,7 @@ export class Variants<
const validators = {} as { const validators = {} as {
[K in keyof VariantValues]: Parser< [K in keyof VariantValues]: Parser<
unknown, unknown,
ExtractInputSpecType<VariantValues[K]["spec"]> ExtractInputSpecType<VariantValues[K]['spec']>
> >
} }
const variants = {} as { const variants = {} as {

View File

@@ -1,3 +1,3 @@
export * as constants from "./inputSpecConstants" export * as constants from './inputSpecConstants'
export * as types from "./inputSpecTypes" export * as types from './inputSpecTypes'
export * as builder from "./builder" export * as builder from './builder'

View File

@@ -1,8 +1,8 @@
import { SmtpValue } from "../../types" import { SmtpValue } from '../../types'
import { GetSystemSmtp, Patterns } from "../../util" import { GetSystemSmtp, Patterns } from '../../util'
import { InputSpec, InputSpecOf } from "./builder/inputSpec" import { InputSpec, InputSpecOf } from './builder/inputSpec'
import { Value } from "./builder/value" import { Value } from './builder/value'
import { Variants } from "./builder/variants" import { Variants } from './builder/variants'
/** /**
* Base SMTP settings, to be used by StartOS for system wide SMTP * Base SMTP settings, to be used by StartOS for system wide SMTP
@@ -11,12 +11,12 @@ export const customSmtp: InputSpec<SmtpValue> = InputSpec.of<
InputSpecOf<SmtpValue> InputSpecOf<SmtpValue>
>({ >({
server: Value.text({ server: Value.text({
name: "SMTP Server", name: 'SMTP Server',
required: true, required: true,
default: null, default: null,
}), }),
port: Value.number({ port: Value.number({
name: "Port", name: 'Port',
required: true, required: true,
default: 587, default: 587,
min: 1, min: 1,
@@ -24,20 +24,20 @@ export const customSmtp: InputSpec<SmtpValue> = InputSpec.of<
integer: true, integer: true,
}), }),
from: Value.text({ from: Value.text({
name: "From Address", name: 'From Address',
required: true, required: true,
default: null, default: null,
placeholder: "Example Name <test@example.com>", placeholder: 'Example Name <test@example.com>',
inputmode: "email", inputmode: 'email',
patterns: [Patterns.emailWithName], patterns: [Patterns.emailWithName],
}), }),
login: Value.text({ login: Value.text({
name: "Login", name: 'Login',
required: true, required: true,
default: null, default: null,
}), }),
password: Value.text({ password: Value.text({
name: "Password", name: 'Password',
required: false, required: false,
default: null, default: null,
masked: true, masked: true,
@@ -45,24 +45,24 @@ export const customSmtp: InputSpec<SmtpValue> = InputSpec.of<
}) })
const smtpVariants = Variants.of({ const smtpVariants = Variants.of({
disabled: { name: "Disabled", spec: InputSpec.of({}) }, disabled: { name: 'Disabled', spec: InputSpec.of({}) },
system: { system: {
name: "System Credentials", name: 'System Credentials',
spec: InputSpec.of({ spec: InputSpec.of({
customFrom: Value.text({ customFrom: Value.text({
name: "Custom From Address", name: 'Custom From Address',
description: description:
"A custom from address for this service. If not provided, the system from address will be used.", 'A custom from address for this service. If not provided, the system from address will be used.',
required: false, required: false,
default: null, default: null,
placeholder: "<name>test@example.com", placeholder: '<name>test@example.com',
inputmode: "email", inputmode: 'email',
patterns: [Patterns.email], patterns: [Patterns.email],
}), }),
}), }),
}, },
custom: { custom: {
name: "Custom Credentials", name: 'Custom Credentials',
spec: customSmtp, spec: customSmtp,
}, },
}) })
@@ -71,11 +71,11 @@ const smtpVariants = Variants.of({
*/ */
export const smtpInputSpec = Value.dynamicUnion(async ({ effects }) => { export const smtpInputSpec = Value.dynamicUnion(async ({ effects }) => {
const smtp = await new GetSystemSmtp(effects).once() const smtp = await new GetSystemSmtp(effects).once()
const disabled = smtp ? [] : ["system"] const disabled = smtp ? [] : ['system']
return { return {
name: "SMTP", name: 'SMTP',
description: "Optionally provide an SMTP server for sending emails", description: 'Optionally provide an SMTP server for sending emails',
default: "disabled", default: 'disabled',
disabled, disabled,
variants: smtpVariants, variants: smtpVariants,
} }

View File

@@ -1,18 +1,18 @@
export type InputSpec = Record<string, ValueSpec> export type InputSpec = Record<string, ValueSpec>
export type ValueType = export type ValueType =
| "text" | 'text'
| "textarea" | 'textarea'
| "number" | 'number'
| "color" | 'color'
| "datetime" | 'datetime'
| "toggle" | 'toggle'
| "select" | 'select'
| "multiselect" | 'multiselect'
| "list" | 'list'
| "object" | 'object'
| "file" | 'file'
| "union" | 'union'
| "hidden" | 'hidden'
export type ValueSpec = ValueSpecOf<ValueType> export type ValueSpec = ValueSpecOf<ValueType>
/** core spec types. These types provide the metadata for performing validations */ /** core spec types. These types provide the metadata for performing validations */
// prettier-ignore // prettier-ignore
@@ -37,13 +37,13 @@ export type ValueSpecText = {
description: string | null description: string | null
warning: string | null warning: string | null
type: "text" type: 'text'
patterns: Pattern[] patterns: Pattern[]
minLength: number | null minLength: number | null
maxLength: number | null maxLength: number | null
masked: boolean masked: boolean
inputmode: "text" | "email" | "tel" | "url" inputmode: 'text' | 'email' | 'tel' | 'url'
placeholder: string | null placeholder: string | null
required: boolean required: boolean
@@ -57,7 +57,7 @@ export type ValueSpecTextarea = {
description: string | null description: string | null
warning: string | null warning: string | null
type: "textarea" type: 'textarea'
patterns: Pattern[] patterns: Pattern[]
placeholder: string | null placeholder: string | null
minLength: number | null minLength: number | null
@@ -71,7 +71,7 @@ export type ValueSpecTextarea = {
} }
export type ValueSpecNumber = { export type ValueSpecNumber = {
type: "number" type: 'number'
min: number | null min: number | null
max: number | null max: number | null
integer: boolean integer: boolean
@@ -91,7 +91,7 @@ export type ValueSpecColor = {
description: string | null description: string | null
warning: string | null warning: string | null
type: "color" type: 'color'
required: boolean required: boolean
default: string | null default: string | null
disabled: false | string disabled: false | string
@@ -101,9 +101,9 @@ export type ValueSpecDatetime = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: "datetime" type: 'datetime'
required: boolean required: boolean
inputmode: "date" | "time" | "datetime-local" inputmode: 'date' | 'time' | 'datetime-local'
min: string | null min: string | null
max: string | null max: string | null
default: string | null default: string | null
@@ -115,7 +115,7 @@ export type ValueSpecSelect = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: "select" type: 'select'
default: string | null default: string | null
disabled: false | string | string[] disabled: false | string | string[]
immutable: boolean immutable: boolean
@@ -127,7 +127,7 @@ export type ValueSpecMultiselect = {
description: string | null description: string | null
warning: string | null warning: string | null
type: "multiselect" type: 'multiselect'
minLength: number | null minLength: number | null
maxLength: number | null maxLength: number | null
disabled: false | string | string[] disabled: false | string | string[]
@@ -139,7 +139,7 @@ export type ValueSpecToggle = {
description: string | null description: string | null
warning: string | null warning: string | null
type: "toggle" type: 'toggle'
default: boolean | null default: boolean | null
disabled: false | string disabled: false | string
immutable: boolean immutable: boolean
@@ -149,7 +149,7 @@ export type ValueSpecUnion = {
description: string | null description: string | null
warning: string | null warning: string | null
type: "union" type: 'union'
variants: Record< variants: Record<
string, string,
{ {
@@ -165,7 +165,7 @@ export type ValueSpecFile = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: "file" type: 'file'
extensions: string[] extensions: string[]
required: boolean required: boolean
} }
@@ -173,13 +173,13 @@ export type ValueSpecObject = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: "object" type: 'object'
spec: InputSpec spec: InputSpec
} }
export type ValueSpecHidden = { export type ValueSpecHidden = {
type: "hidden" type: 'hidden'
} }
export type ListValueSpecType = "text" | "object" export type ListValueSpecType = 'text' | 'object'
// prettier-ignore // prettier-ignore
export type ListValueSpecOf<T extends ListValueSpecType> = export type ListValueSpecOf<T extends ListValueSpecType> =
T extends "text" ? ListValueSpecText : T extends "text" ? ListValueSpecText :
@@ -190,7 +190,7 @@ export type ValueSpecListOf<T extends ListValueSpecType> = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: "list" type: 'list'
spec: ListValueSpecOf<T> spec: ListValueSpecOf<T>
minLength: number | null minLength: number | null
maxLength: number | null maxLength: number | null
@@ -208,18 +208,18 @@ export type Pattern = {
description: string description: string
} }
export type ListValueSpecText = { export type ListValueSpecText = {
type: "text" type: 'text'
patterns: Pattern[] patterns: Pattern[]
minLength: number | null minLength: number | null
maxLength: number | null maxLength: number | null
masked: boolean masked: boolean
generate: null | RandomString generate: null | RandomString
inputmode: "text" | "email" | "tel" | "url" inputmode: 'text' | 'email' | 'tel' | 'url'
placeholder: string | null placeholder: string | null
} }
export type ListValueSpecObject = { export type ListValueSpecObject = {
type: "object" type: 'object'
spec: InputSpec spec: InputSpec
uniqueBy: UniqueBy uniqueBy: UniqueBy
displayAs: string | null displayAs: string | null
@@ -244,5 +244,5 @@ export function isValueSpecListOf<S extends ListValueSpecType>(
t: ValueSpec, t: ValueSpec,
s: S, s: S,
): t is ValueSpecListOf<S> & { spec: ListValueSpecOf<S> } { ): t is ValueSpecListOf<S> & { spec: ListValueSpecOf<S> } {
return "spec" in t && t.spec.type === s return 'spec' in t && t.spec.type === s
} }

View File

@@ -1,16 +1,16 @@
import { InputSpec } from "./input/builder" import { InputSpec } from './input/builder'
import { ExtractInputSpecType } from "./input/builder/inputSpec" import { ExtractInputSpecType } from './input/builder/inputSpec'
import * as T from "../types" import * as T from '../types'
import { once } from "../util" import { once } from '../util'
import { InitScript } from "../inits" import { InitScript } from '../inits'
import { Parser } from "ts-matches" import { Parser } from 'ts-matches'
type MaybeInputSpec<Type> = {} extends Type ? null : InputSpec<Type> type MaybeInputSpec<Type> = {} extends Type ? null : InputSpec<Type>
export type Run<A extends Record<string, any>> = (options: { export type Run<A extends Record<string, any>> = (options: {
effects: T.Effects effects: T.Effects
input: A input: A
spec: T.inputSpecTypes.InputSpec spec: T.inputSpecTypes.InputSpec
}) => Promise<(T.ActionResult & { version: "1" }) | null | void | undefined> }) => Promise<(T.ActionResult & { version: '1' }) | null | void | undefined>
export type GetInput<A extends Record<string, any>> = (options: { export type GetInput<A extends Record<string, any>> = (options: {
effects: T.Effects effects: T.Effects
}) => Promise<null | void | undefined | T.DeepPartial<A>> }) => Promise<null | void | undefined | T.DeepPartial<A>>
@@ -65,7 +65,7 @@ export class Action<Id extends T.ActionId, Type extends Record<string, any>>
InputSpecType extends InputSpec<Record<string, any>>, InputSpecType extends InputSpec<Record<string, any>>,
>( >(
id: Id, id: Id,
metadata: MaybeFn<Omit<T.ActionMetadata, "hasInput">>, metadata: MaybeFn<Omit<T.ActionMetadata, 'hasInput'>>,
inputSpec: InputSpecType, inputSpec: InputSpecType,
getInput: GetInput<ExtractInputSpecType<InputSpecType>>, getInput: GetInput<ExtractInputSpecType<InputSpecType>>,
run: Run<ExtractInputSpecType<InputSpecType>>, run: Run<ExtractInputSpecType<InputSpecType>>,
@@ -80,7 +80,7 @@ export class Action<Id extends T.ActionId, Type extends Record<string, any>>
} }
static withoutInput<Id extends T.ActionId>( static withoutInput<Id extends T.ActionId>(
id: Id, id: Id,
metadata: MaybeFn<Omit<T.ActionMetadata, "hasInput">>, metadata: MaybeFn<Omit<T.ActionMetadata, 'hasInput'>>,
run: Run<{}>, run: Run<{}>,
): Action<Id, {}> { ): Action<Id, {}> {
return new Action( return new Action(
@@ -156,7 +156,7 @@ export class Actions<
} }
addAction<A extends Action<T.ActionId, any>>( addAction<A extends Action<T.ActionId, any>>(
action: A, // TODO: prevent duplicates action: A, // TODO: prevent duplicates
): Actions<AllActions & { [id in A["id"]]: A }> { ): Actions<AllActions & { [id in A['id']]: A }> {
return new Actions({ ...this.actions, [action.id]: action }) return new Actions({ ...this.actions, [action.id]: action })
} }
async init(effects: T.Effects): Promise<void> { async init(effects: T.Effects): Promise<void> {

View File

@@ -1,11 +1,11 @@
import { ExtendedVersion, VersionRange } from "../exver" import { ExtendedVersion, VersionRange } from '../exver'
import { import {
PackageId, PackageId,
HealthCheckId, HealthCheckId,
DependencyRequirement, DependencyRequirement,
CheckDependenciesResult, CheckDependenciesResult,
} from "../types" } from '../types'
import { Effects } from "../Effects" import { Effects } from '../Effects'
export type CheckDependencies<DependencyId extends PackageId = PackageId> = { export type CheckDependencies<DependencyId extends PackageId = PackageId> = {
infoFor: (packageId: DependencyId) => { infoFor: (packageId: DependencyId) => {
@@ -73,11 +73,11 @@ export async function checkDependencies<
} }
const runningSatisfied = (packageId: DependencyId) => { const runningSatisfied = (packageId: DependencyId) => {
const dep = infoFor(packageId) const dep = infoFor(packageId)
return dep.requirement.kind !== "running" || dep.result.isRunning return dep.requirement.kind !== 'running' || dep.result.isRunning
} }
const tasksSatisfied = (packageId: DependencyId) => const tasksSatisfied = (packageId: DependencyId) =>
Object.entries(infoFor(packageId).result.tasks).filter( Object.entries(infoFor(packageId).result.tasks).filter(
([_, t]) => t?.active && t.task.severity === "critical", ([_, t]) => t?.active && t.task.severity === 'critical',
).length === 0 ).length === 0
const healthCheckSatisfied = ( const healthCheckSatisfied = (
packageId: DependencyId, packageId: DependencyId,
@@ -86,17 +86,17 @@ export async function checkDependencies<
const dep = infoFor(packageId) const dep = infoFor(packageId)
if ( if (
healthCheckId && healthCheckId &&
(dep.requirement.kind !== "running" || (dep.requirement.kind !== 'running' ||
!dep.requirement.healthChecks.includes(healthCheckId)) !dep.requirement.healthChecks.includes(healthCheckId))
) { ) {
throw new Error(`Unknown HealthCheckId ${healthCheckId}`) throw new Error(`Unknown HealthCheckId ${healthCheckId}`)
} }
const errors = const errors =
dep.requirement.kind === "running" dep.requirement.kind === 'running'
? dep.requirement.healthChecks ? dep.requirement.healthChecks
.map((id) => [id, dep.result.healthChecks[id] ?? null] as const) .map((id) => [id, dep.result.healthChecks[id] ?? null] as const)
.filter(([id, _]) => (healthCheckId ? id === healthCheckId : true)) .filter(([id, _]) => (healthCheckId ? id === healthCheckId : true))
.filter(([_, res]) => res?.result !== "success") .filter(([_, res]) => res?.result !== 'success')
: [] : []
return errors.length === 0 return errors.length === 0
} }
@@ -138,7 +138,7 @@ export async function checkDependencies<
} }
const throwIfRunningNotSatisfied = (packageId: DependencyId) => { const throwIfRunningNotSatisfied = (packageId: DependencyId) => {
const dep = infoFor(packageId) const dep = infoFor(packageId)
if (dep.requirement.kind === "running" && !dep.result.isRunning) { if (dep.requirement.kind === 'running' && !dep.result.isRunning) {
throw new Error(`${dep.result.title || packageId} is not running`) throw new Error(`${dep.result.title || packageId} is not running`)
} }
return null return null
@@ -146,11 +146,11 @@ export async function checkDependencies<
const throwIfTasksNotSatisfied = (packageId: DependencyId) => { const throwIfTasksNotSatisfied = (packageId: DependencyId) => {
const dep = infoFor(packageId) const dep = infoFor(packageId)
const reqs = Object.entries(dep.result.tasks) const reqs = Object.entries(dep.result.tasks)
.filter(([_, t]) => t?.active && t.task.severity === "critical") .filter(([_, t]) => t?.active && t.task.severity === 'critical')
.map(([id, _]) => id) .map(([id, _]) => id)
if (reqs.length) { if (reqs.length) {
throw new Error( throw new Error(
`The following action requests have not been fulfilled: ${reqs.join(", ")}`, `The following action requests have not been fulfilled: ${reqs.join(', ')}`,
) )
} }
return null return null
@@ -162,27 +162,27 @@ export async function checkDependencies<
const dep = infoFor(packageId) const dep = infoFor(packageId)
if ( if (
healthCheckId && healthCheckId &&
(dep.requirement.kind !== "running" || (dep.requirement.kind !== 'running' ||
!dep.requirement.healthChecks.includes(healthCheckId)) !dep.requirement.healthChecks.includes(healthCheckId))
) { ) {
throw new Error(`Unknown HealthCheckId ${healthCheckId}`) throw new Error(`Unknown HealthCheckId ${healthCheckId}`)
} }
const errors = const errors =
dep.requirement.kind === "running" dep.requirement.kind === 'running'
? dep.requirement.healthChecks ? dep.requirement.healthChecks
.map((id) => [id, dep.result.healthChecks[id] ?? null] as const) .map((id) => [id, dep.result.healthChecks[id] ?? null] as const)
.filter(([id, _]) => (healthCheckId ? id === healthCheckId : true)) .filter(([id, _]) => (healthCheckId ? id === healthCheckId : true))
.filter(([_, res]) => res?.result !== "success") .filter(([_, res]) => res?.result !== 'success')
: [] : []
if (errors.length) { if (errors.length) {
throw new Error( throw new Error(
errors errors
.map(([id, e]) => .map(([id, e]) =>
e e
? `Health Check ${e.name} of ${dep.result.title || packageId} failed with status ${e.result}${e.message ? `: ${e.message}` : ""}` ? `Health Check ${e.name} of ${dep.result.title || packageId} failed with status ${e.result}${e.message ? `: ${e.message}` : ''}`
: `Health Check ${id} of ${dep.result.title} does not exist`, : `Health Check ${id} of ${dep.result.title} does not exist`,
) )
.join("; "), .join('; '),
) )
} }
return null return null
@@ -209,7 +209,7 @@ export async function checkDependencies<
return [] return []
}) })
if (err.length) { if (err.length) {
throw new Error(err.join("; ")) throw new Error(err.join('; '))
} }
return null return null
})() })()

View File

@@ -1,27 +1,27 @@
import * as T from "../types" import * as T from '../types'
import { once } from "../util" import { once } from '../util'
export type RequiredDependenciesOf<Manifest extends T.SDKManifest> = { export type RequiredDependenciesOf<Manifest extends T.SDKManifest> = {
[K in keyof Manifest["dependencies"]]: Exclude< [K in keyof Manifest['dependencies']]: Exclude<
Manifest["dependencies"][K], Manifest['dependencies'][K],
undefined undefined
>["optional"] extends false >['optional'] extends false
? K ? K
: never : never
}[keyof Manifest["dependencies"]] }[keyof Manifest['dependencies']]
export type OptionalDependenciesOf<Manifest extends T.SDKManifest> = Exclude< export type OptionalDependenciesOf<Manifest extends T.SDKManifest> = Exclude<
keyof Manifest["dependencies"], keyof Manifest['dependencies'],
RequiredDependenciesOf<Manifest> RequiredDependenciesOf<Manifest>
> >
type DependencyRequirement = type DependencyRequirement =
| { | {
kind: "running" kind: 'running'
healthChecks: Array<T.HealthCheckId> healthChecks: Array<T.HealthCheckId>
versionRange: string versionRange: string
} }
| { | {
kind: "exists" kind: 'exists'
versionRange: string versionRange: string
} }
type Matches<T, U> = T extends U ? (U extends T ? null : never) : never type Matches<T, U> = T extends U ? (U extends T ? null : never) : never

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
import { DeepMap } from "deep-equality-data-structures" import { DeepMap } from 'deep-equality-data-structures'
import * as P from "./exver" import * as P from './exver'
// prettier-ignore // prettier-ignore
export type ValidateVersion<T extends String> = export type ValidateVersion<T extends String> =
@@ -22,35 +22,35 @@ export type ValidateExVers<T> =
never[] never[]
type Anchor = { type Anchor = {
type: "Anchor" type: 'Anchor'
operator: P.CmpOp operator: P.CmpOp
version: ExtendedVersion version: ExtendedVersion
} }
type And = { type And = {
type: "And" type: 'And'
left: VersionRange left: VersionRange
right: VersionRange right: VersionRange
} }
type Or = { type Or = {
type: "Or" type: 'Or'
left: VersionRange left: VersionRange
right: VersionRange right: VersionRange
} }
type Not = { type Not = {
type: "Not" type: 'Not'
value: VersionRange value: VersionRange
} }
type Flavor = { type Flavor = {
type: "Flavor" type: 'Flavor'
flavor: string | null flavor: string | null
} }
type FlavorNot = { type FlavorNot = {
type: "FlavorNot" type: 'FlavorNot'
flavors: Set<string | null> flavors: Set<string | null>
} }
@@ -107,8 +107,8 @@ function adjacentVersionRangePoints(
} }
function flavorAnd(a: FlavorAtom, b: FlavorAtom): FlavorAtom | null { function flavorAnd(a: FlavorAtom, b: FlavorAtom): FlavorAtom | null {
if (a.type == "Flavor") { if (a.type == 'Flavor') {
if (b.type == "Flavor") { if (b.type == 'Flavor') {
if (a.flavor == b.flavor) { if (a.flavor == b.flavor) {
return a return a
} else { } else {
@@ -122,7 +122,7 @@ function flavorAnd(a: FlavorAtom, b: FlavorAtom): FlavorAtom | null {
} }
} }
} else { } else {
if (b.type == "Flavor") { if (b.type == 'Flavor') {
if (a.flavors.has(b.flavor)) { if (a.flavors.has(b.flavor)) {
return null return null
} else { } else {
@@ -131,7 +131,7 @@ function flavorAnd(a: FlavorAtom, b: FlavorAtom): FlavorAtom | null {
} else { } else {
// TODO: use Set.union if targeting esnext or later // TODO: use Set.union if targeting esnext or later
return { return {
type: "FlavorNot", type: 'FlavorNot',
flavors: new Set([...a.flavors, ...b.flavors]), flavors: new Set([...a.flavors, ...b.flavors]),
} }
} }
@@ -218,12 +218,12 @@ class VersionRangeTable {
static eqFlavor(flavor: string | null): VersionRangeTables { static eqFlavor(flavor: string | null): VersionRangeTables {
return new DeepMap([ return new DeepMap([
[ [
{ type: "Flavor", flavor } as FlavorAtom, { type: 'Flavor', flavor } as FlavorAtom,
new VersionRangeTable([], [true]), new VersionRangeTable([], [true]),
], ],
// make sure the truth table is exhaustive, or `not` will not work properly. // make sure the truth table is exhaustive, or `not` will not work properly.
[ [
{ type: "FlavorNot", flavors: new Set([flavor]) } as FlavorAtom, { type: 'FlavorNot', flavors: new Set([flavor]) } as FlavorAtom,
new VersionRangeTable([], [false]), new VersionRangeTable([], [false]),
], ],
]) ])
@@ -241,12 +241,12 @@ class VersionRangeTable {
): VersionRangeTables { ): VersionRangeTables {
return new DeepMap([ return new DeepMap([
[ [
{ type: "Flavor", flavor } as FlavorAtom, { type: 'Flavor', flavor } as FlavorAtom,
new VersionRangeTable([point], [left, right]), new VersionRangeTable([point], [left, right]),
], ],
// make sure the truth table is exhaustive, or `not` will not work properly. // make sure the truth table is exhaustive, or `not` will not work properly.
[ [
{ type: "FlavorNot", flavors: new Set([flavor]) } as FlavorAtom, { type: 'FlavorNot', flavors: new Set([flavor]) } as FlavorAtom,
new VersionRangeTable([], [false]), new VersionRangeTable([], [false]),
], ],
]) ])
@@ -383,7 +383,7 @@ class VersionRangeTable {
let sum_terms: VersionRange[] = [] let sum_terms: VersionRange[] = []
for (let [flavor, table] of tables) { for (let [flavor, table] of tables) {
let cmp_flavor = null let cmp_flavor = null
if (flavor.type == "Flavor") { if (flavor.type == 'Flavor') {
cmp_flavor = flavor.flavor cmp_flavor = flavor.flavor
} }
for (let i = 0; i < table.values.length; i++) { for (let i = 0; i < table.values.length; i++) {
@@ -392,7 +392,7 @@ class VersionRangeTable {
continue continue
} }
if (flavor.type == "FlavorNot") { if (flavor.type == 'FlavorNot') {
for (let not_flavor of flavor.flavors) { for (let not_flavor of flavor.flavors) {
term.push(VersionRange.flavor(not_flavor).not()) term.push(VersionRange.flavor(not_flavor).not())
} }
@@ -410,7 +410,7 @@ class VersionRangeTable {
if (p != null && q != null && adjacentVersionRangePoints(p, q)) { if (p != null && q != null && adjacentVersionRangePoints(p, q)) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
"=", '=',
new ExtendedVersion(cmp_flavor, p.upstream, p.downstream), new ExtendedVersion(cmp_flavor, p.upstream, p.downstream),
), ),
) )
@@ -418,7 +418,7 @@ class VersionRangeTable {
if (p != null && p.side < 0) { if (p != null && p.side < 0) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
">=", '>=',
new ExtendedVersion(cmp_flavor, p.upstream, p.downstream), new ExtendedVersion(cmp_flavor, p.upstream, p.downstream),
), ),
) )
@@ -426,7 +426,7 @@ class VersionRangeTable {
if (p != null && p.side >= 0) { if (p != null && p.side >= 0) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
">", '>',
new ExtendedVersion(cmp_flavor, p.upstream, p.downstream), new ExtendedVersion(cmp_flavor, p.upstream, p.downstream),
), ),
) )
@@ -434,7 +434,7 @@ class VersionRangeTable {
if (q != null && q.side < 0) { if (q != null && q.side < 0) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
"<", '<',
new ExtendedVersion(cmp_flavor, q.upstream, q.downstream), new ExtendedVersion(cmp_flavor, q.upstream, q.downstream),
), ),
) )
@@ -442,7 +442,7 @@ class VersionRangeTable {
if (q != null && q.side >= 0) { if (q != null && q.side >= 0) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
"<=", '<=',
new ExtendedVersion(cmp_flavor, q.upstream, q.downstream), new ExtendedVersion(cmp_flavor, q.upstream, q.downstream),
), ),
) )
@@ -463,26 +463,26 @@ class VersionRangeTable {
export class VersionRange { export class VersionRange {
constructor(public atom: Anchor | And | Or | Not | P.Any | P.None | Flavor) {} constructor(public atom: Anchor | And | Or | Not | P.Any | P.None | Flavor) {}
toStringParens(parent: "And" | "Or" | "Not") { toStringParens(parent: 'And' | 'Or' | 'Not') {
let needs = true let needs = true
switch (this.atom.type) { switch (this.atom.type) {
case "And": case 'And':
case "Or": case 'Or':
needs = parent != this.atom.type needs = parent != this.atom.type
break break
case "Anchor": case 'Anchor':
case "Any": case 'Any':
case "None": case 'None':
needs = parent == "Not" needs = parent == 'Not'
break break
case "Not": case 'Not':
case "Flavor": case 'Flavor':
needs = false needs = false
break break
} }
if (needs) { if (needs) {
return "(" + this.toString() + ")" return '(' + this.toString() + ')'
} else { } else {
return this.toString() return this.toString()
} }
@@ -490,36 +490,36 @@ export class VersionRange {
toString(): string { toString(): string {
switch (this.atom.type) { switch (this.atom.type) {
case "Anchor": case 'Anchor':
return `${this.atom.operator}${this.atom.version}` return `${this.atom.operator}${this.atom.version}`
case "And": case 'And':
return `${this.atom.left.toStringParens(this.atom.type)} && ${this.atom.right.toStringParens(this.atom.type)}` return `${this.atom.left.toStringParens(this.atom.type)} && ${this.atom.right.toStringParens(this.atom.type)}`
case "Or": case 'Or':
return `${this.atom.left.toStringParens(this.atom.type)} || ${this.atom.right.toStringParens(this.atom.type)}` return `${this.atom.left.toStringParens(this.atom.type)} || ${this.atom.right.toStringParens(this.atom.type)}`
case "Not": case 'Not':
return `!${this.atom.value.toStringParens(this.atom.type)}` return `!${this.atom.value.toStringParens(this.atom.type)}`
case "Flavor": case 'Flavor':
return this.atom.flavor == null ? `#` : `#${this.atom.flavor}` return this.atom.flavor == null ? `#` : `#${this.atom.flavor}`
case "Any": case 'Any':
return "*" return '*'
case "None": case 'None':
return "!" return '!'
} }
} }
private static parseAtom(atom: P.VersionRangeAtom): VersionRange { private static parseAtom(atom: P.VersionRangeAtom): VersionRange {
switch (atom.type) { switch (atom.type) {
case "Not": case 'Not':
return new VersionRange({ return new VersionRange({
type: "Not", type: 'Not',
value: VersionRange.parseAtom(atom.value), value: VersionRange.parseAtom(atom.value),
}) })
case "Parens": case 'Parens':
return VersionRange.parseRange(atom.expr) return VersionRange.parseRange(atom.expr)
case "Anchor": case 'Anchor':
return new VersionRange({ return new VersionRange({
type: "Anchor", type: 'Anchor',
operator: atom.operator || "^", operator: atom.operator || '^',
version: new ExtendedVersion( version: new ExtendedVersion(
atom.version.flavor, atom.version.flavor,
new Version( new Version(
@@ -532,7 +532,7 @@ export class VersionRange {
), ),
), ),
}) })
case "Flavor": case 'Flavor':
return VersionRange.flavor(atom.flavor) return VersionRange.flavor(atom.flavor)
default: default:
return new VersionRange(atom) return new VersionRange(atom)
@@ -543,17 +543,17 @@ export class VersionRange {
let result = VersionRange.parseAtom(range[0]) let result = VersionRange.parseAtom(range[0])
for (const next of range[1]) { for (const next of range[1]) {
switch (next[1]?.[0]) { switch (next[1]?.[0]) {
case "||": case '||':
result = new VersionRange({ result = new VersionRange({
type: "Or", type: 'Or',
left: result, left: result,
right: VersionRange.parseAtom(next[2]), right: VersionRange.parseAtom(next[2]),
}) })
break break
case "&&": case '&&':
default: default:
result = new VersionRange({ result = new VersionRange({
type: "And", type: 'And',
left: result, left: result,
right: VersionRange.parseAtom(next[2]), right: VersionRange.parseAtom(next[2]),
}) })
@@ -565,49 +565,49 @@ export class VersionRange {
static parse(range: string): VersionRange { static parse(range: string): VersionRange {
return VersionRange.parseRange( return VersionRange.parseRange(
P.parse(range, { startRule: "VersionRange" }), P.parse(range, { startRule: 'VersionRange' }),
) )
} }
static anchor(operator: P.CmpOp, version: ExtendedVersion) { static anchor(operator: P.CmpOp, version: ExtendedVersion) {
return new VersionRange({ type: "Anchor", operator, version }) return new VersionRange({ type: 'Anchor', operator, version })
} }
static flavor(flavor: string | null) { static flavor(flavor: string | null) {
return new VersionRange({ type: "Flavor", flavor }) return new VersionRange({ type: 'Flavor', flavor })
} }
static parseEmver(range: string): VersionRange { static parseEmver(range: string): VersionRange {
return VersionRange.parseRange( return VersionRange.parseRange(
P.parse(range, { startRule: "EmverVersionRange" }), P.parse(range, { startRule: 'EmverVersionRange' }),
) )
} }
and(right: VersionRange) { and(right: VersionRange) {
return new VersionRange({ type: "And", left: this, right }) return new VersionRange({ type: 'And', left: this, right })
} }
or(right: VersionRange) { or(right: VersionRange) {
return new VersionRange({ type: "Or", left: this, right }) return new VersionRange({ type: 'Or', left: this, right })
} }
not() { not() {
return new VersionRange({ type: "Not", value: this }) return new VersionRange({ type: 'Not', value: this })
} }
static and(...xs: Array<VersionRange>) { static and(...xs: Array<VersionRange>) {
let y = VersionRange.any() let y = VersionRange.any()
for (let x of xs) { for (let x of xs) {
if (x.atom.type == "Any") { if (x.atom.type == 'Any') {
continue continue
} }
if (x.atom.type == "None") { if (x.atom.type == 'None') {
return x return x
} }
if (y.atom.type == "Any") { if (y.atom.type == 'Any') {
y = x y = x
} else { } else {
y = new VersionRange({ type: "And", left: y, right: x }) y = new VersionRange({ type: 'And', left: y, right: x })
} }
} }
return y return y
@@ -616,27 +616,27 @@ export class VersionRange {
static or(...xs: Array<VersionRange>) { static or(...xs: Array<VersionRange>) {
let y = VersionRange.none() let y = VersionRange.none()
for (let x of xs) { for (let x of xs) {
if (x.atom.type == "None") { if (x.atom.type == 'None') {
continue continue
} }
if (x.atom.type == "Any") { if (x.atom.type == 'Any') {
return x return x
} }
if (y.atom.type == "None") { if (y.atom.type == 'None') {
y = x y = x
} else { } else {
y = new VersionRange({ type: "Or", left: y, right: x }) y = new VersionRange({ type: 'Or', left: y, right: x })
} }
} }
return y return y
} }
static any() { static any() {
return new VersionRange({ type: "Any" }) return new VersionRange({ type: 'Any' })
} }
static none() { static none() {
return new VersionRange({ type: "None" }) return new VersionRange({ type: 'None' })
} }
satisfiedBy(version: Version | ExtendedVersion) { satisfiedBy(version: Version | ExtendedVersion) {
@@ -645,23 +645,23 @@ export class VersionRange {
tables(): VersionRangeTables { tables(): VersionRangeTables {
switch (this.atom.type) { switch (this.atom.type) {
case "Anchor": case 'Anchor':
switch (this.atom.operator) { switch (this.atom.operator) {
case "=": case '=':
// `=1.2.3` is equivalent to `>=1.2.3 && <=1.2.4 && #flavor` // `=1.2.3` is equivalent to `>=1.2.3 && <=1.2.4 && #flavor`
return VersionRangeTable.and( return VersionRangeTable.and(
VersionRangeTable.cmp(this.atom.version, -1, false, true), VersionRangeTable.cmp(this.atom.version, -1, false, true),
VersionRangeTable.cmp(this.atom.version, 1, true, false), VersionRangeTable.cmp(this.atom.version, 1, true, false),
) )
case ">": case '>':
return VersionRangeTable.cmp(this.atom.version, 1, false, true) return VersionRangeTable.cmp(this.atom.version, 1, false, true)
case "<": case '<':
return VersionRangeTable.cmp(this.atom.version, -1, true, false) return VersionRangeTable.cmp(this.atom.version, -1, true, false)
case ">=": case '>=':
return VersionRangeTable.cmp(this.atom.version, -1, false, true) return VersionRangeTable.cmp(this.atom.version, -1, false, true)
case "<=": case '<=':
return VersionRangeTable.cmp(this.atom.version, 1, true, false) return VersionRangeTable.cmp(this.atom.version, 1, true, false)
case "!=": case '!=':
// `!=1.2.3` is equivalent to `!(>=1.2.3 && <=1.2.3 && #flavor)` // `!=1.2.3` is equivalent to `!(>=1.2.3 && <=1.2.3 && #flavor)`
// **not** equivalent to `(<1.2.3 || >1.2.3) && #flavor` // **not** equivalent to `(<1.2.3 || >1.2.3) && #flavor`
return VersionRangeTable.not( return VersionRangeTable.not(
@@ -670,7 +670,7 @@ export class VersionRange {
VersionRangeTable.cmp(this.atom.version, 1, true, false), VersionRangeTable.cmp(this.atom.version, 1, true, false),
), ),
) )
case "^": case '^':
// `^1.2.3` is equivalent to `>=1.2.3 && <2.0.0 && #flavor` // `^1.2.3` is equivalent to `>=1.2.3 && <2.0.0 && #flavor`
return VersionRangeTable.and( return VersionRangeTable.and(
VersionRangeTable.cmp(this.atom.version, -1, false, true), VersionRangeTable.cmp(this.atom.version, -1, false, true),
@@ -681,7 +681,7 @@ export class VersionRange {
false, false,
), ),
) )
case "~": case '~':
// `~1.2.3` is equivalent to `>=1.2.3 && <1.3.0 && #flavor` // `~1.2.3` is equivalent to `>=1.2.3 && <1.3.0 && #flavor`
return VersionRangeTable.and( return VersionRangeTable.and(
VersionRangeTable.cmp(this.atom.version, -1, false, true), VersionRangeTable.cmp(this.atom.version, -1, false, true),
@@ -693,23 +693,23 @@ export class VersionRange {
), ),
) )
} }
case "Flavor": case 'Flavor':
return VersionRangeTable.eqFlavor(this.atom.flavor) return VersionRangeTable.eqFlavor(this.atom.flavor)
case "Not": case 'Not':
return VersionRangeTable.not(this.atom.value.tables()) return VersionRangeTable.not(this.atom.value.tables())
case "And": case 'And':
return VersionRangeTable.and( return VersionRangeTable.and(
this.atom.left.tables(), this.atom.left.tables(),
this.atom.right.tables(), this.atom.right.tables(),
) )
case "Or": case 'Or':
return VersionRangeTable.or( return VersionRangeTable.or(
this.atom.left.tables(), this.atom.left.tables(),
this.atom.right.tables(), this.atom.right.tables(),
) )
case "Any": case 'Any':
return true return true
case "None": case 'None':
return false return false
} }
} }
@@ -734,23 +734,23 @@ export class Version {
) {} ) {}
toString(): string { toString(): string {
return `${this.number.join(".")}${this.prerelease.length > 0 ? `-${this.prerelease.join(".")}` : ""}` return `${this.number.join('.')}${this.prerelease.length > 0 ? `-${this.prerelease.join('.')}` : ''}`
} }
compare(other: Version): "greater" | "equal" | "less" { compare(other: Version): 'greater' | 'equal' | 'less' {
const numLen = Math.max(this.number.length, other.number.length) const numLen = Math.max(this.number.length, other.number.length)
for (let i = 0; i < numLen; i++) { for (let i = 0; i < numLen; i++) {
if ((this.number[i] || 0) > (other.number[i] || 0)) { if ((this.number[i] || 0) > (other.number[i] || 0)) {
return "greater" return 'greater'
} else if ((this.number[i] || 0) < (other.number[i] || 0)) { } else if ((this.number[i] || 0) < (other.number[i] || 0)) {
return "less" return 'less'
} }
} }
if (this.prerelease.length === 0 && other.prerelease.length !== 0) { if (this.prerelease.length === 0 && other.prerelease.length !== 0) {
return "greater" return 'greater'
} else if (this.prerelease.length !== 0 && other.prerelease.length === 0) { } else if (this.prerelease.length !== 0 && other.prerelease.length === 0) {
return "less" return 'less'
} }
const prereleaseLen = Math.max( const prereleaseLen = Math.max(
@@ -760,42 +760,42 @@ export class Version {
for (let i = 0; i < prereleaseLen; i++) { for (let i = 0; i < prereleaseLen; i++) {
if (typeof this.prerelease[i] === typeof other.prerelease[i]) { if (typeof this.prerelease[i] === typeof other.prerelease[i]) {
if (this.prerelease[i] > other.prerelease[i]) { if (this.prerelease[i] > other.prerelease[i]) {
return "greater" return 'greater'
} else if (this.prerelease[i] < other.prerelease[i]) { } else if (this.prerelease[i] < other.prerelease[i]) {
return "less" return 'less'
} }
} else { } else {
switch (`${typeof this.prerelease[1]}:${typeof other.prerelease[i]}`) { switch (`${typeof this.prerelease[1]}:${typeof other.prerelease[i]}`) {
case "number:string": case 'number:string':
return "less" return 'less'
case "string:number": case 'string:number':
return "greater" return 'greater'
case "number:undefined": case 'number:undefined':
case "string:undefined": case 'string:undefined':
return "greater" return 'greater'
case "undefined:number": case 'undefined:number':
case "undefined:string": case 'undefined:string':
return "less" return 'less'
} }
} }
} }
return "equal" return 'equal'
} }
compareForSort(other: Version): -1 | 0 | 1 { compareForSort(other: Version): -1 | 0 | 1 {
switch (this.compare(other)) { switch (this.compare(other)) {
case "greater": case 'greater':
return 1 return 1
case "equal": case 'equal':
return 0 return 0
case "less": case 'less':
return -1 return -1
} }
} }
static parse(version: string): Version { static parse(version: string): Version {
const parsed = P.parse(version, { startRule: "Version" }) const parsed = P.parse(version, { startRule: 'Version' })
return new Version(parsed.number, parsed.prerelease) return new Version(parsed.number, parsed.prerelease)
} }
@@ -815,25 +815,25 @@ export class ExtendedVersion {
) {} ) {}
toString(): string { toString(): string {
return `${this.flavor ? `#${this.flavor}:` : ""}${this.upstream.toString()}:${this.downstream.toString()}` return `${this.flavor ? `#${this.flavor}:` : ''}${this.upstream.toString()}:${this.downstream.toString()}`
} }
compare(other: ExtendedVersion): "greater" | "equal" | "less" | null { compare(other: ExtendedVersion): 'greater' | 'equal' | 'less' | null {
if (this.flavor !== other.flavor) { if (this.flavor !== other.flavor) {
return null return null
} }
const upstreamCmp = this.upstream.compare(other.upstream) const upstreamCmp = this.upstream.compare(other.upstream)
if (upstreamCmp !== "equal") { if (upstreamCmp !== 'equal') {
return upstreamCmp return upstreamCmp
} }
return this.downstream.compare(other.downstream) return this.downstream.compare(other.downstream)
} }
compareLexicographic(other: ExtendedVersion): "greater" | "equal" | "less" { compareLexicographic(other: ExtendedVersion): 'greater' | 'equal' | 'less' {
if ((this.flavor || "") > (other.flavor || "")) { if ((this.flavor || '') > (other.flavor || '')) {
return "greater" return 'greater'
} else if ((this.flavor || "") > (other.flavor || "")) { } else if ((this.flavor || '') > (other.flavor || '')) {
return "less" return 'less'
} else { } else {
return this.compare(other)! return this.compare(other)!
} }
@@ -841,37 +841,37 @@ export class ExtendedVersion {
compareForSort(other: ExtendedVersion): 1 | 0 | -1 { compareForSort(other: ExtendedVersion): 1 | 0 | -1 {
switch (this.compareLexicographic(other)) { switch (this.compareLexicographic(other)) {
case "greater": case 'greater':
return 1 return 1
case "equal": case 'equal':
return 0 return 0
case "less": case 'less':
return -1 return -1
} }
} }
greaterThan(other: ExtendedVersion): boolean { greaterThan(other: ExtendedVersion): boolean {
return this.compare(other) === "greater" return this.compare(other) === 'greater'
} }
greaterThanOrEqual(other: ExtendedVersion): boolean { greaterThanOrEqual(other: ExtendedVersion): boolean {
return ["greater", "equal"].includes(this.compare(other) as string) return ['greater', 'equal'].includes(this.compare(other) as string)
} }
equals(other: ExtendedVersion): boolean { equals(other: ExtendedVersion): boolean {
return this.compare(other) === "equal" return this.compare(other) === 'equal'
} }
lessThan(other: ExtendedVersion): boolean { lessThan(other: ExtendedVersion): boolean {
return this.compare(other) === "less" return this.compare(other) === 'less'
} }
lessThanOrEqual(other: ExtendedVersion): boolean { lessThanOrEqual(other: ExtendedVersion): boolean {
return ["less", "equal"].includes(this.compare(other) as string) return ['less', 'equal'].includes(this.compare(other) as string)
} }
static parse(extendedVersion: string): ExtendedVersion { static parse(extendedVersion: string): ExtendedVersion {
const parsed = P.parse(extendedVersion, { startRule: "ExtendedVersion" }) const parsed = P.parse(extendedVersion, { startRule: 'ExtendedVersion' })
return new ExtendedVersion( return new ExtendedVersion(
parsed.flavor || null, parsed.flavor || null,
new Version(parsed.upstream.number, parsed.upstream.prerelease), new Version(parsed.upstream.number, parsed.upstream.prerelease),
@@ -881,7 +881,7 @@ export class ExtendedVersion {
static parseEmver(extendedVersion: string): ExtendedVersion { static parseEmver(extendedVersion: string): ExtendedVersion {
try { try {
const parsed = P.parse(extendedVersion, { startRule: "Emver" }) const parsed = P.parse(extendedVersion, { startRule: 'Emver' })
return new ExtendedVersion( return new ExtendedVersion(
parsed.flavor || null, parsed.flavor || null,
new Version(parsed.upstream.number, parsed.upstream.prerelease), new Version(parsed.upstream.number, parsed.upstream.prerelease),
@@ -956,22 +956,22 @@ export class ExtendedVersion {
*/ */
satisfies(versionRange: VersionRange): boolean { satisfies(versionRange: VersionRange): boolean {
switch (versionRange.atom.type) { switch (versionRange.atom.type) {
case "Anchor": case 'Anchor':
const otherVersion = versionRange.atom.version const otherVersion = versionRange.atom.version
switch (versionRange.atom.operator) { switch (versionRange.atom.operator) {
case "=": case '=':
return this.equals(otherVersion) return this.equals(otherVersion)
case ">": case '>':
return this.greaterThan(otherVersion) return this.greaterThan(otherVersion)
case "<": case '<':
return this.lessThan(otherVersion) return this.lessThan(otherVersion)
case ">=": case '>=':
return this.greaterThanOrEqual(otherVersion) return this.greaterThanOrEqual(otherVersion)
case "<=": case '<=':
return this.lessThanOrEqual(otherVersion) return this.lessThanOrEqual(otherVersion)
case "!=": case '!=':
return !this.equals(otherVersion) return !this.equals(otherVersion)
case "^": case '^':
const nextMajor = versionRange.atom.version.incrementMajor() const nextMajor = versionRange.atom.version.incrementMajor()
if ( if (
this.greaterThanOrEqual(otherVersion) && this.greaterThanOrEqual(otherVersion) &&
@@ -981,7 +981,7 @@ export class ExtendedVersion {
} else { } else {
return false return false
} }
case "~": case '~':
const nextMinor = versionRange.atom.version.incrementMinor() const nextMinor = versionRange.atom.version.incrementMinor()
if ( if (
this.greaterThanOrEqual(otherVersion) && this.greaterThanOrEqual(otherVersion) &&
@@ -992,23 +992,23 @@ export class ExtendedVersion {
return false return false
} }
} }
case "Flavor": case 'Flavor':
return versionRange.atom.flavor == this.flavor return versionRange.atom.flavor == this.flavor
case "And": case 'And':
return ( return (
this.satisfies(versionRange.atom.left) && this.satisfies(versionRange.atom.left) &&
this.satisfies(versionRange.atom.right) this.satisfies(versionRange.atom.right)
) )
case "Or": case 'Or':
return ( return (
this.satisfies(versionRange.atom.left) || this.satisfies(versionRange.atom.left) ||
this.satisfies(versionRange.atom.right) this.satisfies(versionRange.atom.right)
) )
case "Not": case 'Not':
return !this.satisfies(versionRange.atom.value) return !this.satisfies(versionRange.atom.value)
case "Any": case 'Any':
return true return true
case "None": case 'None':
return false return false
} }
} }
@@ -1020,34 +1020,34 @@ export const testTypeVersion = <T extends string>(t: T & ValidateVersion<T>) =>
t t
function tests() { function tests() {
testTypeVersion("1.2.3") testTypeVersion('1.2.3')
testTypeVersion("1") testTypeVersion('1')
testTypeVersion("12.34.56") testTypeVersion('12.34.56')
testTypeVersion("1.2-3") testTypeVersion('1.2-3')
testTypeVersion("1-3") testTypeVersion('1-3')
testTypeVersion("1-alpha") testTypeVersion('1-alpha')
// @ts-expect-error // @ts-expect-error
testTypeVersion("-3") testTypeVersion('-3')
// @ts-expect-error // @ts-expect-error
testTypeVersion("1.2.3:1") testTypeVersion('1.2.3:1')
// @ts-expect-error // @ts-expect-error
testTypeVersion("#cat:1:1") testTypeVersion('#cat:1:1')
testTypeExVer("1.2.3:1.2.3") testTypeExVer('1.2.3:1.2.3')
testTypeExVer("1.2.3.4.5.6.7.8.9.0:1") testTypeExVer('1.2.3.4.5.6.7.8.9.0:1')
testTypeExVer("100:1") testTypeExVer('100:1')
testTypeExVer("#cat:1:1") testTypeExVer('#cat:1:1')
testTypeExVer("1.2.3.4.5.6.7.8.9.11.22.33:1") testTypeExVer('1.2.3.4.5.6.7.8.9.11.22.33:1')
testTypeExVer("1-0:1") testTypeExVer('1-0:1')
testTypeExVer("1-0:1") testTypeExVer('1-0:1')
// @ts-expect-error // @ts-expect-error
testTypeExVer("1.2-3") testTypeExVer('1.2-3')
// @ts-expect-error // @ts-expect-error
testTypeExVer("1-3") testTypeExVer('1-3')
// @ts-expect-error // @ts-expect-error
testTypeExVer("1.2.3.4.5.6.7.8.9.0.10:1" as string) testTypeExVer('1.2.3.4.5.6.7.8.9.0.10:1' as string)
// @ts-expect-error // @ts-expect-error
testTypeExVer("1.-2:1") testTypeExVer('1.-2:1')
// @ts-expect-error // @ts-expect-error
testTypeExVer("1..2.3:3") testTypeExVer('1..2.3:3')
} }

View File

@@ -1,13 +1,13 @@
export { S9pk } from "./s9pk" export { S9pk } from './s9pk'
export { VersionRange, ExtendedVersion, Version } from "./exver" export { VersionRange, ExtendedVersion, Version } from './exver'
export * as inputSpec from "./actions/input" export * as inputSpec from './actions/input'
export * as ISB from "./actions/input/builder" export * as ISB from './actions/input/builder'
export * as IST from "./actions/input/inputSpecTypes" export * as IST from './actions/input/inputSpecTypes'
export * as types from "./types" export * as types from './types'
export * as T from "./types" export * as T from './types'
export * as yaml from "yaml" export * as yaml from 'yaml'
export * as inits from "./inits" export * as inits from './inits'
export * as matches from "ts-matches" export * as matches from 'ts-matches'
export * as utils from "./util" export * as utils from './util'

View File

@@ -1,2 +1,2 @@
export * from "./setupInit" export * from './setupInit'
export * from "./setupUninit" export * from './setupUninit'

View File

@@ -1,8 +1,8 @@
import { VersionRange } from "../../../base/lib/exver" import { VersionRange } from '../../../base/lib/exver'
import * as T from "../../../base/lib/types" import * as T from '../../../base/lib/types'
import { once } from "../util" import { once } from '../util'
export type InitKind = "install" | "update" | "restore" | null export type InitKind = 'install' | 'update' | 'restore' | null
export type InitFn<Kind extends InitKind = InitKind> = ( export type InitFn<Kind extends InitKind = InitKind> = (
effects: T.Effects, effects: T.Effects,
@@ -31,7 +31,7 @@ export function setupInit(...inits: InitScriptOrFn[]): T.ExpectedExports.init {
complete.then(() => fn()).catch(console.error), complete.then(() => fn()).catch(console.error),
) )
try { try {
if ("init" in init) await init.init(e, opts.kind) if ('init' in init) await init.init(e, opts.kind)
else await init(e, opts.kind) else await init(e, opts.kind)
} finally { } finally {
res() res()
@@ -43,7 +43,7 @@ export function setupInit(...inits: InitScriptOrFn[]): T.ExpectedExports.init {
} }
export function setupOnInit(onInit: InitScriptOrFn): InitScript { export function setupOnInit(onInit: InitScriptOrFn): InitScript {
return "init" in onInit return 'init' in onInit
? onInit ? onInit
: { : {
init: async (effects, kind) => { init: async (effects, kind) => {

View File

@@ -1,5 +1,5 @@
import { ExtendedVersion, VersionRange } from "../../../base/lib/exver" import { ExtendedVersion, VersionRange } from '../../../base/lib/exver'
import * as T from "../../../base/lib/types" import * as T from '../../../base/lib/types'
export type UninitFn = ( export type UninitFn = (
effects: T.Effects, effects: T.Effects,
@@ -34,14 +34,14 @@ export function setupUninit(
): T.ExpectedExports.uninit { ): T.ExpectedExports.uninit {
return async (opts) => { return async (opts) => {
for (const uninit of uninits) { for (const uninit of uninits) {
if ("uninit" in uninit) await uninit.uninit(opts.effects, opts.target) if ('uninit' in uninit) await uninit.uninit(opts.effects, opts.target)
else await uninit(opts.effects, opts.target) else await uninit(opts.effects, opts.target)
} }
} }
} }
export function setupOnUninit(onUninit: UninitScriptOrFn): UninitScript { export function setupOnUninit(onUninit: UninitScriptOrFn): UninitScript {
return "uninit" in onUninit return 'uninit' in onUninit
? onUninit ? onUninit
: { : {
uninit: async (effects, target) => { uninit: async (effects, target) => {

View File

@@ -1,10 +1,10 @@
import { object, string } from "ts-matches" import { object, string } from 'ts-matches'
import { Effects } from "../Effects" import { Effects } from '../Effects'
import { Origin } from "./Origin" import { Origin } from './Origin'
import { AddSslOptions, BindParams } from "../osBindings" import { AddSslOptions, BindParams } from '../osBindings'
import { Security } from "../osBindings" import { Security } from '../osBindings'
import { BindOptions } from "../osBindings" import { BindOptions } from '../osBindings'
import { AlpnInfo } from "../osBindings" import { AlpnInfo } from '../osBindings'
export { AddSslOptions, Security, BindOptions } export { AddSslOptions, Security, BindOptions }
@@ -12,8 +12,8 @@ export const knownProtocols = {
http: { http: {
secure: null, secure: null,
defaultPort: 80, defaultPort: 80,
withSsl: "https", withSsl: 'https',
alpn: { specified: ["http/1.1"] } as AlpnInfo, alpn: { specified: ['http/1.1'] } as AlpnInfo,
}, },
https: { https: {
secure: { ssl: true }, secure: { ssl: true },
@@ -22,8 +22,8 @@ export const knownProtocols = {
ws: { ws: {
secure: null, secure: null,
defaultPort: 80, defaultPort: 80,
withSsl: "wss", withSsl: 'wss',
alpn: { specified: ["http/1.1"] } as AlpnInfo, alpn: { specified: ['http/1.1'] } as AlpnInfo,
}, },
wss: { wss: {
secure: { ssl: true }, secure: { ssl: true },
@@ -140,8 +140,8 @@ export class MultiHost {
addXForwardedHeaders: false, addXForwardedHeaders: false,
preferredExternalPort: knownProtocols[sslProto].defaultPort, preferredExternalPort: knownProtocols[sslProto].defaultPort,
scheme: sslProto, scheme: sslProto,
alpn: "alpn" in protoInfo ? protoInfo.alpn : null, alpn: 'alpn' in protoInfo ? protoInfo.alpn : null,
...("addSsl" in options ? options.addSsl : null), ...('addSsl' in options ? options.addSsl : null),
} }
: options.addSsl : options.addSsl
? { ? {
@@ -149,7 +149,7 @@ export class MultiHost {
preferredExternalPort: 443, preferredExternalPort: 443,
scheme: sslProto, scheme: sslProto,
alpn: null, alpn: null,
...("addSsl" in options ? options.addSsl : null), ...('addSsl' in options ? options.addSsl : null),
} }
: null : null
@@ -169,8 +169,8 @@ export class MultiHost {
private getSslProto(options: BindOptionsByKnownProtocol) { private getSslProto(options: BindOptionsByKnownProtocol) {
const proto = options.protocol const proto = options.protocol
const protoInfo = knownProtocols[proto] const protoInfo = knownProtocols[proto]
if (inObject("noAddSsl", options) && options.noAddSsl) return null if (inObject('noAddSsl', options) && options.noAddSsl) return null
if ("withSsl" in protoInfo && protoInfo.withSsl) return protoInfo.withSsl if ('withSsl' in protoInfo && protoInfo.withSsl) return protoInfo.withSsl
if (protoInfo.secure?.ssl) return proto if (protoInfo.secure?.ssl) return proto
return null return null
} }

View File

@@ -1,7 +1,7 @@
import { AddressInfo } from "../types" import { AddressInfo } from '../types'
import { AddressReceipt } from "./AddressReceipt" import { AddressReceipt } from './AddressReceipt'
import { MultiHost, Scheme } from "./Host" import { MultiHost, Scheme } from './Host'
import { ServiceInterfaceBuilder } from "./ServiceInterfaceBuilder" import { ServiceInterfaceBuilder } from './ServiceInterfaceBuilder'
export class Origin { export class Origin {
constructor( constructor(
@@ -21,9 +21,9 @@ export class Origin {
.map( .map(
([key, val]) => `${encodeURIComponent(key)}=${encodeURIComponent(val)}`, ([key, val]) => `${encodeURIComponent(key)}=${encodeURIComponent(val)}`,
) )
.join("&") .join('&')
const qp = qpEntries.length ? `?${qpEntries}` : "" const qp = qpEntries.length ? `?${qpEntries}` : ''
return { return {
hostId: this.host.options.id, hostId: this.host.options.id,

View File

@@ -1,6 +1,6 @@
import { ServiceInterfaceType } from "../types" import { ServiceInterfaceType } from '../types'
import { Effects } from "../Effects" import { Effects } from '../Effects'
import { Scheme } from "./Host" import { Scheme } from './Host'
/** /**
* A helper class for creating a Network Interface * A helper class for creating a Network Interface

View File

@@ -1,6 +1,6 @@
import * as T from "../types" import * as T from '../types'
import { once } from "../util" import { once } from '../util'
import { AddressReceipt } from "./AddressReceipt" import { AddressReceipt } from './AddressReceipt'
declare const UpdateServiceInterfacesProof: unique symbol declare const UpdateServiceInterfacesProof: unique symbol
export type UpdateServiceInterfacesReceipt = { export type UpdateServiceInterfacesReceipt = {

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AnyVerifyingKey } from "./AnyVerifyingKey" import type { AnyVerifyingKey } from './AnyVerifyingKey'
export type AcceptSigners = export type AcceptSigners =
| { signer: AnyVerifyingKey } | { signer: AnyVerifyingKey }

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Guid } from "./Guid" import type { Guid } from './Guid'
export type ActionInput = { export type ActionInput = {
eventId: Guid eventId: Guid

View File

@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ActionVisibility } from "./ActionVisibility" import type { ActionVisibility } from './ActionVisibility'
import type { AllowedStatuses } from "./AllowedStatuses" import type { AllowedStatuses } from './AllowedStatuses'
export type ActionMetadata = { export type ActionMetadata = {
/** /**

View File

@@ -1,7 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ActionResultV0 } from "./ActionResultV0" import type { ActionResultV0 } from './ActionResultV0'
import type { ActionResultV1 } from "./ActionResultV1" import type { ActionResultV1 } from './ActionResultV1'
export type ActionResult = export type ActionResult =
| ({ version: "0" } & ActionResultV0) | ({ version: '0' } & ActionResultV0)
| ({ version: "1" } & ActionResultV1) | ({ version: '1' } & ActionResultV1)

View File

@@ -11,7 +11,7 @@ export type ActionResultMember = {
description: string | null description: string | null
} & ( } & (
| { | {
type: "single" type: 'single'
/** /**
* The actual string value to display * The actual string value to display
*/ */
@@ -30,7 +30,7 @@ export type ActionResultMember = {
masked: boolean masked: boolean
} }
| { | {
type: "group" type: 'group'
/** /**
* An new group of nested values, experienced by the user as an accordion dropdown * An new group of nested values, experienced by the user as an accordion dropdown
*/ */

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ActionResultValue } from "./ActionResultValue" import type { ActionResultValue } from './ActionResultValue'
export type ActionResultV1 = { export type ActionResultV1 = {
/** /**

View File

@@ -1,9 +1,9 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ActionResultMember } from "./ActionResultMember" import type { ActionResultMember } from './ActionResultMember'
export type ActionResultValue = export type ActionResultValue =
| { | {
type: "single" type: 'single'
/** /**
* The actual string value to display * The actual string value to display
*/ */
@@ -22,7 +22,7 @@ export type ActionResultValue =
masked: boolean masked: boolean
} }
| { | {
type: "group" type: 'group'
/** /**
* An new group of nested values, experienced by the user as an accordion dropdown * An new group of nested values, experienced by the user as an accordion dropdown
*/ */

View File

@@ -1,3 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type ActionVisibility = "hidden" | { disabled: string } | "enabled" export type ActionVisibility = 'hidden' | { disabled: string } | 'enabled'

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Guid } from "./Guid" import type { Guid } from './Guid'
export type AddAdminParams = { signer: Guid } export type AddAdminParams = { signer: Guid }

View File

@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AnySignature } from "./AnySignature" import type { AnySignature } from './AnySignature'
import type { Blake3Commitment } from "./Blake3Commitment" import type { Blake3Commitment } from './Blake3Commitment'
export type AddAssetParams = { export type AddAssetParams = {
version: string version: string

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { LocaleString } from "./LocaleString" import type { LocaleString } from './LocaleString'
export type AddCategoryParams = { id: string; name: LocaleString } export type AddCategoryParams = { id: string; name: LocaleString }

View File

@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AnySignature } from "./AnySignature" import type { AnySignature } from './AnySignature'
import type { MerkleArchiveCommitment } from "./MerkleArchiveCommitment" import type { MerkleArchiveCommitment } from './MerkleArchiveCommitment'
export type AddMirrorParams = { export type AddMirrorParams = {
url: string url: string

View File

@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AnySignature } from "./AnySignature" import type { AnySignature } from './AnySignature'
import type { MerkleArchiveCommitment } from "./MerkleArchiveCommitment" import type { MerkleArchiveCommitment } from './MerkleArchiveCommitment'
export type AddPackageParams = { export type AddPackageParams = {
urls: string[] urls: string[]

View File

@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Guid } from "./Guid" import type { Guid } from './Guid'
import type { PackageId } from "./PackageId" import type { PackageId } from './PackageId'
export type AddPackageSignerParams = { export type AddPackageSignerParams = {
id: PackageId id: PackageId

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PackageId } from "./PackageId" import type { PackageId } from './PackageId'
export type AddPackageToCategoryParams = { id: string; package: PackageId } export type AddPackageToCategoryParams = { id: string; package: PackageId }

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AlpnInfo } from "./AlpnInfo" import type { AlpnInfo } from './AlpnInfo'
export type AddSslOptions = { export type AddSslOptions = {
preferredExternalPort: number preferredExternalPort: number

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { HostId } from "./HostId" import type { HostId } from './HostId'
export type AddressInfo = { export type AddressInfo = {
username: string | null username: string | null

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { LocaleString } from "./LocaleString" import type { LocaleString } from './LocaleString'
export type Alerts = { export type Alerts = {
install: LocaleString | null install: LocaleString | null

View File

@@ -1,3 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type Algorithm = "ecdsa" | "ed25519" export type Algorithm = 'ecdsa' | 'ed25519'

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PackageDataEntry } from "./PackageDataEntry" import type { PackageDataEntry } from './PackageDataEntry'
import type { PackageId } from "./PackageId" import type { PackageId } from './PackageId'
export type AllPackageData = { [key: PackageId]: PackageDataEntry } export type AllPackageData = { [key: PackageId]: PackageDataEntry }

View File

@@ -1,3 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AllowedStatuses = "only-running" | "only-stopped" | "any" export type AllowedStatuses = 'only-running' | 'only-stopped' | 'any'

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { MaybeUtf8String } from "./MaybeUtf8String" import type { MaybeUtf8String } from './MaybeUtf8String'
export type AlpnInfo = "reflect" | { specified: Array<MaybeUtf8String> } export type AlpnInfo = 'reflect' | { specified: Array<MaybeUtf8String> }

View File

@@ -1,3 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type ApiState = "error" | "initializing" | "running" export type ApiState = 'error' | 'initializing' | 'running'

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { EncryptedWire } from "./EncryptedWire" import type { EncryptedWire } from './EncryptedWire'
export type AttachParams = { export type AttachParams = {
password: EncryptedWire | null password: EncryptedWire | null

View File

@@ -1,7 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { BlockDev } from "./BlockDev" import type { BlockDev } from './BlockDev'
import type { Cifs } from "./Cifs" import type { Cifs } from './Cifs'
export type BackupTargetFS = export type BackupTargetFS =
| ({ type: "disk" } & BlockDev) | ({ type: 'disk' } & BlockDev)
| ({ type: "cifs" } & Cifs) | ({ type: 'cifs' } & Cifs)

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { HostId } from "./HostId" import type { HostId } from './HostId'
export type BindId = { id: HostId; internalPort: number } export type BindId = { id: HostId; internalPort: number }

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { BindOptions } from "./BindOptions" import type { BindOptions } from './BindOptions'
import type { NetInfo } from "./NetInfo" import type { NetInfo } from './NetInfo'
export type BindInfo = { enabled: boolean; options: BindOptions; net: NetInfo } export type BindInfo = { enabled: boolean; options: BindOptions; net: NetInfo }

View File

@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AddSslOptions } from "./AddSslOptions" import type { AddSslOptions } from './AddSslOptions'
import type { Security } from "./Security" import type { Security } from './Security'
export type BindOptions = { export type BindOptions = {
preferredExternalPort: number preferredExternalPort: number

View File

@@ -1,7 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AddSslOptions } from "./AddSslOptions" import type { AddSslOptions } from './AddSslOptions'
import type { HostId } from "./HostId" import type { HostId } from './HostId'
import type { Security } from "./Security" import type { Security } from './Security'
export type BindParams = { export type BindParams = {
id: HostId id: HostId

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { GatewayId } from "./GatewayId" import type { GatewayId } from './GatewayId'
export type BindingGatewaySetEnabledParams = { export type BindingGatewaySetEnabledParams = {
internalPort: number internalPort: number

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Base64 } from "./Base64" import type { Base64 } from './Base64'
export type Blake3Commitment = { hash: Base64; size: number } export type Blake3Commitment = { hash: Base64; size: number }

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { LocaleString } from "./LocaleString" import type { LocaleString } from './LocaleString'
export type Category = { name: LocaleString } export type Category = { name: LocaleString }

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PackageId } from "./PackageId" import type { PackageId } from './PackageId'
export type CheckDependenciesParam = { packageIds?: Array<PackageId> } export type CheckDependenciesParam = { packageIds?: Array<PackageId> }

View File

@@ -1,10 +1,10 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { HealthCheckId } from "./HealthCheckId" import type { HealthCheckId } from './HealthCheckId'
import type { NamedHealthCheckResult } from "./NamedHealthCheckResult" import type { NamedHealthCheckResult } from './NamedHealthCheckResult'
import type { PackageId } from "./PackageId" import type { PackageId } from './PackageId'
import type { ReplayId } from "./ReplayId" import type { ReplayId } from './ReplayId'
import type { TaskEntry } from "./TaskEntry" import type { TaskEntry } from './TaskEntry'
import type { Version } from "./Version" import type { Version } from './Version'
export type CheckDependenciesResult = { export type CheckDependenciesResult = {
packageId: PackageId packageId: PackageId

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ActionId } from "./ActionId" import type { ActionId } from './ActionId'
export type ClearActionsParams = { except: Array<ActionId> } export type ClearActionsParams = { except: Array<ActionId> }

View File

@@ -1,4 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { BindId } from "./BindId" import type { BindId } from './BindId'
export type ClearBindingsParams = { except: Array<BindId> } export type ClearBindingsParams = { except: Array<BindId> }

Some files were not shown because too many files have changed in this diff Show More