Compare commits

..

34 Commits

Author SHA1 Message Date
Aiden McClelland
a81c01b232 fix grub config 2026-01-27 15:27:49 -07:00
Aiden McClelland
c96a5b7754 fix install over 0.3.5.1 2026-01-26 14:16:11 -07:00
Matt Hill
b39760d9d7 add i18n helper to sdk 2026-01-22 16:07:18 -07:00
Aiden McClelland
2f4bb1e35e fix device migration 2026-01-22 10:27:49 -07:00
Aiden McClelland
0534b5813b ignore missing package archive on 035 migration 2026-01-21 15:58:46 -07:00
Aiden McClelland
3333416331 omit live medium from disk list and better space management 2026-01-21 13:33:52 -07:00
Aiden McClelland
50540e4847 version bump 2026-01-21 13:02:46 -07:00
Aiden McClelland
35545056e7 (mostly) redundant localization on frontend 2026-01-21 12:46:32 -07:00
Aiden McClelland
3828b03790 working setup flow + manifest localization 2026-01-20 18:28:28 -07:00
Matt Hill
6a1c1fde06 revert mock 2026-01-20 17:09:01 -07:00
Matt Hill
2e5cd4b8ca ability to shutdown after install 2026-01-20 17:08:40 -07:00
Matt Hill
99727e132c keyboard keymap also 2026-01-20 14:24:45 -07:00
Matt Hill
0a0f0850d7 fix dns selection 2026-01-19 17:23:29 -07:00
Alex Inkin
65fc3e5c52 feat: add "Add new gateway" option (#3098)
* feat: add "Add new gateway" option

* Update web/projects/ui/src/app/routes/portal/components/form/controls/select.component.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* add translation

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Matt Hill <mattnine@protonmail.com>
2026-01-17 21:37:30 -07:00
Aiden McClelland
0d4ddc3451 help text for args 2026-01-16 19:09:41 -07:00
Aiden McClelland
4ee72d560a fix missing about text 2026-01-16 17:13:33 -07:00
Aiden McClelland
9d364b0691 Merge branch 'feature/consolidate-setup' of github.com:Start9Labs/start-os into feature/consolidate-setup 2026-01-16 17:03:36 -07:00
Aiden McClelland
d786424353 translate backend strings 2026-01-16 17:03:34 -07:00
Matt Hill
5ecb230bcc revert mock 2026-01-16 16:25:36 -07:00
Matt Hill
fee03ef407 switch to posix strings for language internal 2026-01-16 16:25:08 -07:00
Matt Hill
8ca3d56aa9 remove start-tunnel readme 2026-01-16 15:41:28 -07:00
Aiden McClelland
763c7d9f87 wip: localization 2026-01-16 11:49:06 -07:00
Matt Hill
ea86117e5f fix typo 2026-01-16 01:36:16 -07:00
Matt Hill
708b273b42 finish setup wizard and ui language-keyboard feature 2026-01-15 23:49:24 -07:00
Matt Hill
db344386ef only warn on update if breakages (#3097) 2026-01-15 13:33:42 -07:00
Matt Hill
d3048c59e8 better ST messaging on setup 2026-01-15 13:14:49 -07:00
Matt Hill
5e5aa5d830 use dialogservice wrapper 2026-01-15 13:14:49 -07:00
Matt Hill
880aa8040d translations 2026-01-15 13:14:49 -07:00
Matt Hill
93fda28393 fix translation 2026-01-15 13:13:33 -07:00
Matt Hill
3fba55a54d undo mock 2026-01-15 13:03:53 -07:00
Matt Hill
075ed97c96 use http 2026-01-15 13:02:21 -07:00
Matt Hill
42ef2bdf7e combine install and setup and refactor all 2026-01-15 13:02:21 -07:00
Aiden McClelland
645083913c add start-cli flash-os 2026-01-15 13:02:21 -07:00
Aiden McClelland
02bce4ed61 start consolidating 2026-01-15 13:02:21 -07:00
354 changed files with 6238 additions and 8094 deletions

View File

@@ -1,5 +0,0 @@
{
"attribution": {
"commit": ""
}
}

View File

@@ -1,81 +0,0 @@
name: Setup Build Environment
description: Common build environment setup steps
inputs:
nodejs-version:
description: Node.js version
required: true
setup-python:
description: Set up Python
required: false
default: "false"
setup-docker:
description: Set up Docker QEMU and Buildx
required: false
default: "true"
setup-sccache:
description: Configure sccache for GitHub Actions
required: false
default: "true"
free-space:
description: Remove unnecessary packages to free disk space
required: false
default: "true"
runs:
using: composite
steps:
- name: Free disk space
if: inputs.free-space == 'true'
shell: bash
run: |
sudo apt-get remove --purge -y azure-cli || true
sudo apt-get remove --purge -y firefox || true
sudo apt-get remove --purge -y ghc-* || true
sudo apt-get remove --purge -y google-cloud-sdk || true
sudo apt-get remove --purge -y google-chrome-stable || true
sudo apt-get remove --purge -y powershell || true
sudo apt-get remove --purge -y php* || true
sudo apt-get remove --purge -y ruby* || true
sudo apt-get remove --purge -y mono-* || true
sudo apt-get autoremove -y
sudo apt-get clean
sudo rm -rf /usr/lib/jvm
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /usr/local/lib/android
sudo rm -rf /usr/share/dotnet
sudo rm -rf /usr/share/swift
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
# BuildJet runners lack /opt/hostedtoolcache, which setup-python and setup-qemu expect
- name: Ensure hostedtoolcache exists
shell: bash
run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
- name: Set up Python
if: inputs.setup-python == 'true'
uses: actions/setup-python@v5
with:
python-version: "3.x"
- uses: actions/setup-node@v4
with:
node-version: ${{ inputs.nodejs-version }}
cache: npm
cache-dependency-path: "**/package-lock.json"
- name: Set up Docker QEMU
if: inputs.setup-docker == 'true'
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: inputs.setup-docker == 'true'
uses: docker/setup-buildx-action@v3
- name: Configure sccache
if: inputs.setup-sccache == 'true'
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');

View File

@@ -37,10 +37,6 @@ on:
- master - master
- next/* - next/*
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true
env: env:
NODEJS_VERSION: "24.11.0" NODEJS_VERSION: "24.11.0"
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}' ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
@@ -48,7 +44,6 @@ env:
jobs: jobs:
compile: compile:
name: Build Debian Package name: Build Debian Package
if: github.event.pull_request.draft != true
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
@@ -65,15 +60,50 @@ jobs:
}} }}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }} runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps: steps:
- name: Mount tmpfs - name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
run: sudo mount -t tmpfs tmpfs .
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- uses: actions/setup-node@v4
with: with:
nodejs-version: ${{ env.NODEJS_VERSION }} node-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make - name: Make
run: TARGET=${{ matrix.triple }} make cli run: TARGET=${{ matrix.triple }} make cli

View File

@@ -1,4 +1,4 @@
name: start-registry name: Start-Registry
on: on:
workflow_call: workflow_call:
@@ -35,10 +35,6 @@ on:
- master - master
- next/* - next/*
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true
env: env:
NODEJS_VERSION: "24.11.0" NODEJS_VERSION: "24.11.0"
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}' ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
@@ -46,7 +42,6 @@ env:
jobs: jobs:
compile: compile:
name: Build Debian Package name: Build Debian Package
if: github.event.pull_request.draft != true
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
@@ -61,15 +56,50 @@ jobs:
}} }}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }} runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps: steps:
- name: Mount tmpfs - name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
run: sudo mount -t tmpfs tmpfs .
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- uses: actions/setup-node@v4
with: with:
nodejs-version: ${{ env.NODEJS_VERSION }} node-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make - name: Make
run: make registry-deb run: make registry-deb

View File

@@ -1,4 +1,4 @@
name: start-tunnel name: Start-Tunnel
on: on:
workflow_call: workflow_call:
@@ -35,10 +35,6 @@ on:
- master - master
- next/* - next/*
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true
env: env:
NODEJS_VERSION: "24.11.0" NODEJS_VERSION: "24.11.0"
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}' ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
@@ -46,7 +42,6 @@ env:
jobs: jobs:
compile: compile:
name: Build Debian Package name: Build Debian Package
if: github.event.pull_request.draft != true
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
@@ -61,15 +56,50 @@ jobs:
}} }}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }} runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps: steps:
- name: Mount tmpfs - name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
run: sudo mount -t tmpfs tmpfs .
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- uses: actions/setup-node@v4
with: with:
nodejs-version: ${{ env.NODEJS_VERSION }} node-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make - name: Make
run: make tunnel-deb run: make tunnel-deb

View File

@@ -27,7 +27,7 @@ on:
- x86_64-nonfree - x86_64-nonfree
- aarch64 - aarch64
- aarch64-nonfree - aarch64-nonfree
# - raspberrypi - raspberrypi
- riscv64 - riscv64
deploy: deploy:
type: choice type: choice
@@ -45,10 +45,6 @@ on:
- master - master
- next/* - next/*
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true
env: env:
NODEJS_VERSION: "24.11.0" NODEJS_VERSION: "24.11.0"
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}' ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
@@ -56,7 +52,6 @@ env:
jobs: jobs:
compile: compile:
name: Compile Base Binaries name: Compile Base Binaries
if: github.event.pull_request.draft != true
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
@@ -91,16 +86,54 @@ jobs:
)[github.event.inputs.runner == 'fast'] )[github.event.inputs.runner == 'fast']
}} }}
steps: steps:
- name: Mount tmpfs - name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y azure-cli || true
sudo apt-get remove --purge -y firefox || true
sudo apt-get remove --purge -y ghc-* || true
sudo apt-get remove --purge -y google-cloud-sdk || true
sudo apt-get remove --purge -y google-chrome-stable || true
sudo apt-get remove --purge -y powershell || true
sudo apt-get remove --purge -y php* || true
sudo apt-get remove --purge -y ruby* || true
sudo apt-get remove --purge -y mono-* || true
sudo apt-get autoremove -y
sudo apt-get clean
sudo rm -rf /usr/lib/jvm # All JDKs
sudo rm -rf /usr/local/.ghcup # Haskell toolchain
sudo rm -rf /usr/local/lib/android # Android SDK/NDK, emulator
sudo rm -rf /usr/share/dotnet # .NET SDKs
sudo rm -rf /usr/share/swift # Swift toolchain (if present)
sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
run: sudo mount -t tmpfs tmpfs .
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- name: Set up Python
uses: actions/setup-python@v5
with: with:
nodejs-version: ${{ env.NODEJS_VERSION }} python-version: "3.x"
setup-python: "true"
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make - name: Make
run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar
@@ -118,14 +151,13 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
# TODO: re-add "raspberrypi" to the platform list below
platform: >- platform: >-
${{ ${{
fromJson( fromJson(
format( format(
'[ '[
["{0}"], ["{0}"],
["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64"] ["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64", "raspberrypi"]
]', ]',
github.event.inputs.platform || 'ALL' github.event.inputs.platform || 'ALL'
) )
@@ -189,10 +221,6 @@ jobs:
sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.) sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
if: ${{ github.event.inputs.runner != 'fast' }} if: ${{ github.event.inputs.runner != 'fast' }}
# BuildJet runners lack /opt/hostedtoolcache, which setup-qemu expects
- name: Ensure hostedtoolcache exists
run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
- name: Set up docker QEMU - name: Set up docker QEMU
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v3

View File

@@ -10,10 +10,6 @@ on:
- master - master
- next/* - next/*
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true
env: env:
NODEJS_VERSION: "24.11.0" NODEJS_VERSION: "24.11.0"
ENVIRONMENT: dev-unstable ENVIRONMENT: dev-unstable
@@ -21,18 +17,15 @@ env:
jobs: jobs:
test: test:
name: Run Automated Tests name: Run Automated Tests
if: github.event.pull_request.draft != true
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
submodules: recursive submodules: recursive
- uses: ./.github/actions/setup-build
- uses: actions/setup-node@v4
with: with:
nodejs-version: ${{ env.NODEJS_VERSION }} node-version: ${{ env.NODEJS_VERSION }}
free-space: "false"
setup-docker: "false"
setup-sccache: "false"
- name: Build And Run Tests - name: Build And Run Tests
run: make test run: make test

4
.gitignore vendored
View File

@@ -19,6 +19,4 @@ secrets.db
/compiled.tar /compiled.tar
/compiled-*.tar /compiled-*.tar
/build/lib/firmware /build/lib/firmware
tmp tmp
web/.i18n-checked
agents/USER.md

146
CLAUDE.md
View File

@@ -1,146 +0,0 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
StartOS is an open-source Linux distribution for running personal servers. It manages discovery, installation, network configuration, backups, and health monitoring of self-hosted services.
**Tech Stack:**
- Backend: Rust (async/Tokio, Axum web framework)
- Frontend: Angular 20 + TypeScript + TaigaUI
- Container runtime: Node.js/TypeScript with LXC
- Database/State: Patch-DB (git submodule) - storage layer with reactive frontend sync
- API: JSON-RPC via rpc-toolkit (see `agents/rpc-toolkit.md`)
- Auth: Password + session cookie, public/private key signatures, local authcookie (see `core/src/middleware/auth/`)
## Build & Development
See [CONTRIBUTING.md](CONTRIBUTING.md) for:
- Environment setup and requirements
- Build commands and make targets
- Testing and formatting commands
- Environment variables
**Quick reference:**
```bash
. ./devmode.sh # Enable dev mode
make update-startbox REMOTE=start9@<ip> # Fastest iteration (binary + UI)
make test-core # Run Rust tests
```
## Architecture
### Core (`/core`)
The Rust backend daemon. Main binaries:
- `startbox` - Main daemon (runs as `startd`)
- `start-cli` - CLI interface
- `start-container` - Runs inside LXC containers; communicates with host and manages subcontainers
- `registrybox` - Registry daemon
- `tunnelbox` - VPN/tunnel daemon
**Key modules:**
- `src/context/` - Context types (RpcContext, CliContext, InitContext, DiagnosticContext)
- `src/service/` - Service lifecycle management with actor pattern (`service_actor.rs`)
- `src/db/model/` - Patch-DB models (`public.rs` synced to frontend, `private.rs` backend-only)
- `src/net/` - Networking (DNS, ACME, WiFi, Tor via Arti, WireGuard)
- `src/s9pk/` - S9PK package format (merkle archive)
- `src/registry/` - Package registry management
**RPC Pattern:** See `agents/rpc-toolkit.md`
### Web (`/web`)
Angular projects sharing common code:
- `projects/ui/` - Main admin interface
- `projects/setup-wizard/` - Initial setup
- `projects/start-tunnel/` - VPN management UI
- `projects/shared/` - Common library (API clients, components)
- `projects/marketplace/` - Service discovery
**Development:**
```bash
cd web
npm ci
npm run start:ui # Dev server with mocks
npm run build:ui # Production build
npm run check # Type check all projects
```
### Container Runtime (`/container-runtime`)
Node.js runtime that manages service containers via RPC. See `RPCSpec.md` for protocol.
**Container Architecture:**
```
LXC Container (uniform base for all services)
└── systemd
└── container-runtime.service
└── Loads /usr/lib/startos/package/index.js (from s9pk javascript.squashfs)
└── Package JS launches subcontainers (from images in s9pk)
```
The container runtime communicates with the host via JSON-RPC over Unix socket. Package JavaScript must export functions conforming to the `ABI` type defined in `sdk/base/lib/types.ts`.
**`/media/startos/` directory (mounted by host into container):**
| Path | Description |
|------|-------------|
| `volumes/<name>/` | Package data volumes (id-mapped, persistent) |
| `assets/` | Read-only assets from s9pk `assets.squashfs` |
| `images/<name>/` | Container images (squashfs, used for subcontainers) |
| `images/<name>.env` | Environment variables for image |
| `images/<name>.json` | Image metadata |
| `backup/` | Backup mount point (mounted during backup operations) |
| `rpc/service.sock` | RPC socket (container runtime listens here) |
| `rpc/host.sock` | Host RPC socket (for effects callbacks to host) |
**S9PK Structure:** See `agents/s9pk-structure.md`
### SDK (`/sdk`)
TypeScript SDK for packaging services (`@start9labs/start-sdk`).
- `base/` - Core types, ABI definitions, effects interface (`@start9labs/start-sdk-base`)
- `package/` - Full SDK for package developers, re-exports base
### Patch-DB (`/patch-db`)
Git submodule providing diff-based state synchronization. Changes to `db/model/public.rs` automatically sync to the frontend.
**Key patterns:**
- `db.peek().await` - Get a read-only snapshot of the database state
- `db.mutate(|db| { ... }).await` - Apply mutations atomically, returns `MutateResult`
- `#[derive(HasModel)]` - Derive macro for types stored in the database, generates typed accessors
**Generated accessor types** (from `HasModel` derive):
- `as_field()` - Immutable reference: `&Model<T>`
- `as_field_mut()` - Mutable reference: `&mut Model<T>`
- `into_field()` - Owned value: `Model<T>`
**`Model<T>` APIs** (from `db/prelude.rs`):
- `.de()` - Deserialize to `T`
- `.ser(&value)` - Serialize from `T`
- `.mutate(|v| ...)` - Deserialize, mutate, reserialize
- For maps: `.keys()`, `.as_idx(&key)`, `.as_idx_mut(&key)`, `.insert()`, `.remove()`, `.contains_key()`
## Supplementary Documentation
The `agents/` directory contains detailed documentation for AI assistants:
- `TODO.md` - Pending tasks for AI agents (check this first, remove items when completed)
- `USER.md` - Current user identifier (gitignored, see below)
- `rpc-toolkit.md` - JSON-RPC patterns and handler configuration
- `core-rust-patterns.md` - Common utilities and patterns for Rust code in `/core` (guard pattern, mount guards, etc.)
- `s9pk-structure.md` - S9PK package format structure
- `i18n-patterns.md` - Internationalization key conventions and usage in `/core`
### Session Startup
On startup:
1. **Check for `agents/USER.md`** - If it doesn't exist, prompt the user for their name/identifier and create it. This file is gitignored since it varies per developer.
2. **Check `agents/TODO.md` for relevant tasks** - Show TODOs that either:
- Have no `@username` tag (relevant to everyone)
- Are tagged with the current user's identifier
Skip TODOs tagged with a different user.
3. **Ask "What would you like to do today?"** - Offer options for each relevant TODO item, plus "Something else" for other requests.

View File

@@ -11,190 +11,123 @@ This guide is for contributing to the StartOS. If you are interested in packagin
```bash ```bash
/ /
├── assets/ # Screenshots for README ├── assets/
├── build/ # Auxiliary files and scripts for deployed images ├── container-runtime/
├── container-runtime/ # Node.js program managing package containers ├── core/
├── core/ # Rust backend: API, daemon (startd), CLI (start-cli) ├── build/
├── debian/ # Debian package maintainer scripts ├── debian/
├── image-recipe/ # Scripts for building StartOS images ├── web/
├── patch-db/ # (submodule) Diff-based data store for frontend sync ├── image-recipe/
├── sdk/ # TypeScript SDK for building StartOS packages ├── patch-db
└── web/ # Web UIs (Angular) └── sdk/
``` ```
See component READMEs for details: #### assets
- [`core`](core/README.md)
- [`web`](web/README.md) screenshots for the StartOS README
- [`build`](build/README.md)
- [`patch-db`](https://github.com/Start9Labs/patch-db) #### container-runtime
A NodeJS program that dynamically loads maintainer scripts and communicates with the OS to manage packages
#### core
An API, daemon (startd), and CLI (start-cli) that together provide the core functionality of StartOS.
#### build
Auxiliary files and scripts to include in deployed StartOS images
#### debian
Maintainer scripts for the StartOS Debian package
#### web
Web UIs served under various conditions and used to interact with StartOS APIs.
#### image-recipe
Scripts for building StartOS images
#### patch-db (submodule)
A diff based data store used to synchronize data between the web interfaces and server.
#### sdk
A typescript sdk for building start-os packages
## Environment Setup ## Environment Setup
#### Clone the StartOS repository
```sh ```sh
git clone https://github.com/Start9Labs/start-os.git --recurse-submodules git clone https://github.com/Start9Labs/start-os.git --recurse-submodules
cd start-os cd start-os
``` ```
### Development Mode #### Continue to your project of interest for additional instructions:
For faster iteration during development: - [`core`](core/README.md)
- [`web-interfaces`](web-interfaces/README.md)
```sh - [`build`](build/README.md)
. ./devmode.sh - [`patch-db`](https://github.com/Start9Labs/patch-db)
```
This sets `ENVIRONMENT=dev` and `GIT_BRANCH_AS_HASH=1` to prevent rebuilds on every commit.
## Building ## Building
All builds can be performed on any operating system that can run Docker. This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components. To build any specific component, simply run `make <TARGET>` replacing `<TARGET>` with the name of the target you'd like to build
This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components.
### Requirements ### Requirements
- [GNU Make](https://www.gnu.org/software/make/) - [GNU Make](https://www.gnu.org/software/make/)
- [Docker](https://docs.docker.com/get-docker/) or [Podman](https://podman.io/) - [Docker](https://docs.docker.com/get-docker/)
- [NodeJS v20.16.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) - [NodeJS v20.16.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
- [Rust](https://rustup.rs/) (nightly for formatting) - [sed](https://www.gnu.org/software/sed/)
- [sed](https://www.gnu.org/software/sed/), [grep](https://www.gnu.org/software/grep/), [awk](https://www.gnu.org/software/gawk/) - [grep](https://www.gnu.org/software/grep/)
- [awk](https://www.gnu.org/software/gawk/)
- [jq](https://jqlang.github.io/jq/) - [jq](https://jqlang.github.io/jq/)
- [gzip](https://www.gnu.org/software/gzip/), [brotli](https://github.com/google/brotli) - [gzip](https://www.gnu.org/software/gzip/)
- [brotli](https://github.com/google/brotli)
### Environment Variables ### Environment variables
| Variable | Description | - `PLATFORM`: which platform you would like to build for. Must be one of `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `raspberrypi`
|----------|-------------| - NOTE: `nonfree` images are for including `nonfree` firmware packages in the built ISO
| `PLATFORM` | Target platform: `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `riscv64`, `raspberrypi` | - `ENVIRONMENT`: a hyphen separated set of feature flags to enable
| `ENVIRONMENT` | Hyphen-separated feature flags (see below) | - `dev`: enables password ssh (INSECURE!) and does not compress frontends
| `PROFILE` | Build profile: `release` (default) or `dev` | - `unstable`: enables assertions that will cause errors on unexpected inconsistencies that are undesirable in production use either for performance or reliability reasons
| `GIT_BRANCH_AS_HASH` | Set to `1` to use git branch name as version hash (avoids rebuilds) | - `docker`: use `docker` instead of `podman`
- `GIT_BRANCH_AS_HASH`: set to `1` to use the current git branch name as the git hash so that the project does not need to be rebuilt on each commit
**ENVIRONMENT flags:** ### Useful Make Targets
- `dev` - Enables password SSH before setup, skips frontend compression
- `unstable` - Enables assertions and debugging with performance penalty
- `console` - Enables tokio-console for async debugging
**Platform notes:**
- `-nonfree` variants include proprietary firmware and drivers
- `raspberrypi` includes non-free components by necessity
- Platform is remembered between builds if not specified
### Make Targets
#### Building
| Target | Description |
|--------|-------------|
| `iso` | Create full `.iso` image (not for raspberrypi) |
| `img` | Create full `.img` image (raspberrypi only) |
| `deb` | Build Debian package |
| `all` | Build all Rust binaries |
| `uis` | Build all web UIs |
| `ui` | Build main UI only |
| `ts-bindings` | Generate TypeScript bindings from Rust types |
#### Deploying to Device
For devices on the same network:
| Target | Description |
|--------|-------------|
| `update-startbox REMOTE=start9@<ip>` | Deploy binary + UI only (fastest) |
| `update-deb REMOTE=start9@<ip>` | Deploy full Debian package |
| `update REMOTE=start9@<ip>` | OTA-style update |
| `reflash REMOTE=start9@<ip>` | Reflash as if using live ISO |
| `update-overlay REMOTE=start9@<ip>` | Deploy to in-memory overlay (reverts on reboot) |
For devices on different networks (uses [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)):
| Target | Description |
|--------|-------------|
| `wormhole` | Send startbox binary |
| `wormhole-deb` | Send Debian package |
| `wormhole-squashfs` | Send squashfs image |
#### Other
| Target | Description |
|--------|-------------|
| `format` | Run code formatting (Rust nightly required) |
| `test` | Run all automated tests |
| `test-core` | Run Rust tests |
| `test-sdk` | Run SDK tests |
| `test-container-runtime` | Run container runtime tests |
| `clean` | Delete all compiled artifacts |
## Testing
```bash
make test # All tests
make test-core # Rust tests (via ./core/run-tests.sh)
make test-sdk # SDK tests
make test-container-runtime # Container runtime tests
# Run specific Rust test
cd core && cargo test <test_name> --features=test
```
## Code Formatting
```bash
# Rust (requires nightly)
make format
# TypeScript/HTML/SCSS (web)
cd web && npm run format
```
## Code Style Guidelines
### Formatting
Run the formatters before committing. Configuration is handled by `rustfmt.toml` (Rust) and prettier configs (TypeScript).
### Documentation & Comments
**Rust:**
- Add doc comments (`///`) to public APIs, structs, and non-obvious functions
- Use `//` comments sparingly for complex logic that isn't self-evident
- Prefer self-documenting code (clear naming, small functions) over comments
**TypeScript:**
- Document exported functions and complex types with JSDoc
- Keep comments focused on "why" rather than "what"
**General:**
- Don't add comments that just restate the code
- Update or remove comments when code changes
- TODOs should include context: `// TODO(username): reason`
### Commit Messages
Use [Conventional Commits](https://www.conventionalcommits.org/):
```
<type>(<scope>): <description>
[optional body]
[optional footer]
```
**Types:**
- `feat` - New feature
- `fix` - Bug fix
- `docs` - Documentation only
- `style` - Formatting, no code change
- `refactor` - Code change that neither fixes a bug nor adds a feature
- `test` - Adding or updating tests
- `chore` - Build process, dependencies, etc.
**Examples:**
```
feat(web): add dark mode toggle
fix(core): resolve race condition in service startup
docs: update CONTRIBUTING.md with style guidelines
refactor(sdk): simplify package validation logic
```
- `iso`: Create a full `.iso` image
- Only possible from Debian
- Not available for `PLATFORM=raspberrypi`
- Additional Requirements:
- [debspawn](https://github.com/lkhq/debspawn)
- `img`: Create a full `.img` image
- Only possible from Debian
- Only available for `PLATFORM=raspberrypi`
- Additional Requirements:
- [debspawn](https://github.com/lkhq/debspawn)
- `format`: Run automatic code formatting for the project
- Additional Requirements:
- [rust](https://rustup.rs/)
- `test`: Run automated tests for the project
- Additional Requirements:
- [rust](https://rustup.rs/)
- `update`: Deploy the current working project to a device over ssh as if through an over-the-air update
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
- `reflash`: Deploy the current working project to a device over ssh as if using a live `iso` image to reflash it
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
- `update-overlay`: Deploy the current working project to a device over ssh to the in-memory overlay without restarting it
- WARNING: changes will be reverted after the device is rebooted
- WARNING: changes to `init` will not take effect as the device is already initialized
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
- `wormhole`: Deploy the `startbox` to a device using [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
- When the build it complete will emit a command to paste into the shell of the device to upgrade it
- Additional Requirements:
- [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
- `clean`: Delete all compiled artifacts

View File

@@ -324,19 +324,15 @@ web/.angular/.updated: patch-db/client/dist/index.js sdk/baseDist/package.json w
mkdir -p web/.angular mkdir -p web/.angular
touch web/.angular/.updated touch web/.angular/.updated
web/.i18n-checked: $(WEB_SHARED_SRC) $(WEB_UI_SRC) $(WEB_SETUP_WIZARD_SRC) $(WEB_START_TUNNEL_SRC) web/dist/raw/ui/index.html: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
npm --prefix web run check:i18n
touch web/.i18n-checked
web/dist/raw/ui/index.html: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
npm --prefix web run build:ui npm --prefix web run build:ui
touch web/dist/raw/ui/index.html touch web/dist/raw/ui/index.html
web/dist/raw/setup-wizard/index.html: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked web/dist/raw/setup-wizard/index.html: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
npm --prefix web run build:setup npm --prefix web run build:setup
touch web/dist/raw/setup-wizard/index.html touch web/dist/raw/setup-wizard/index.html
web/dist/raw/start-tunnel/index.html: $(WEB_START_TUNNEL_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked web/dist/raw/start-tunnel/index.html: $(WEB_START_TUNNEL_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
npm --prefix web run build:tunnel npm --prefix web run build:tunnel
touch web/dist/raw/start-tunnel/index.html touch web/dist/raw/start-tunnel/index.html

View File

@@ -1,9 +0,0 @@
# AI Agent TODOs
Pending tasks for AI agents. Remove items when completed.
## Unreviewed CLAUDE.md Sections
- [ ] Architecture - Web (`/web`) - @MattDHill

View File

@@ -1,249 +0,0 @@
# Utilities & Patterns
This document covers common utilities and patterns used throughout the StartOS codebase.
## Util Module (`core/src/util/`)
The `util` module contains reusable utilities. Key submodules:
| Module | Purpose |
|--------|---------|
| `actor/` | Actor pattern implementation for concurrent state management |
| `collections/` | Custom collection types |
| `crypto.rs` | Cryptographic utilities (encryption, hashing) |
| `future.rs` | Future/async utilities |
| `io.rs` | File I/O helpers (create_file, canonicalize, etc.) |
| `iter.rs` | Iterator extensions |
| `net.rs` | Network utilities |
| `rpc.rs` | RPC helpers |
| `rpc_client.rs` | RPC client utilities |
| `serde.rs` | Serialization helpers (Base64, display/fromstr, etc.) |
| `sync.rs` | Synchronization primitives (SyncMutex, etc.) |
## Command Invocation (`Invoke` trait)
The `Invoke` trait provides a clean way to run external commands with error handling:
```rust
use crate::util::Invoke;
// Simple invocation
tokio::process::Command::new("ls")
.arg("-la")
.invoke(ErrorKind::Filesystem)
.await?;
// With timeout
tokio::process::Command::new("slow-command")
.timeout(Some(Duration::from_secs(30)))
.invoke(ErrorKind::Timeout)
.await?;
// With input
let mut input = Cursor::new(b"input data");
tokio::process::Command::new("cat")
.input(Some(&mut input))
.invoke(ErrorKind::Filesystem)
.await?;
// Piped commands
tokio::process::Command::new("cat")
.arg("file.txt")
.pipe(&mut tokio::process::Command::new("grep").arg("pattern"))
.invoke(ErrorKind::Filesystem)
.await?;
```
## Guard Pattern
Guards ensure cleanup happens when they go out of scope.
### `GeneralGuard` / `GeneralBoxedGuard`
For arbitrary cleanup actions:
```rust
use crate::util::GeneralGuard;
let guard = GeneralGuard::new(|| {
println!("Cleanup runs on drop");
});
// Do work...
// Explicit drop with action
guard.drop();
// Or skip the action
// guard.drop_without_action();
```
### `FileLock`
File-based locking with automatic unlock:
```rust
use crate::util::FileLock;
let lock = FileLock::new("/path/to/lockfile", true).await?; // blocking=true
// Lock held until dropped or explicitly unlocked
lock.unlock().await?;
```
## Mount Guard Pattern (`core/src/disk/mount/guard.rs`)
RAII guards for filesystem mounts. Ensures filesystems are unmounted when guards are dropped.
### `MountGuard`
Basic mount guard:
```rust
use crate::disk::mount::guard::MountGuard;
use crate::disk::mount::filesystem::{MountType, ReadOnly};
let guard = MountGuard::mount(&filesystem, "/mnt/target", ReadOnly).await?;
// Use the mounted filesystem at guard.path()
do_something(guard.path()).await?;
// Explicit unmount (or auto-unmounts on drop)
guard.unmount(false).await?; // false = don't delete mountpoint
```
### `TmpMountGuard`
Reference-counted temporary mount (mounts to `/media/startos/tmp/`):
```rust
use crate::disk::mount::guard::TmpMountGuard;
use crate::disk::mount::filesystem::ReadOnly;
// Multiple clones share the same mount
let guard1 = TmpMountGuard::mount(&filesystem, ReadOnly).await?;
let guard2 = guard1.clone();
// Mount stays alive while any guard exists
// Auto-unmounts when last guard is dropped
```
### `GenericMountGuard` trait
All mount guards implement this trait:
```rust
pub trait GenericMountGuard: std::fmt::Debug + Send + Sync + 'static {
fn path(&self) -> &Path;
fn unmount(self) -> impl Future<Output = Result<(), Error>> + Send;
}
```
### `SubPath`
Wraps a mount guard to point to a subdirectory:
```rust
use crate::disk::mount::guard::SubPath;
let mount = TmpMountGuard::mount(&filesystem, ReadOnly).await?;
let subdir = SubPath::new(mount, "data/subdir");
// subdir.path() returns the full path including subdirectory
```
## FileSystem Implementations (`core/src/disk/mount/filesystem/`)
Various filesystem types that can be mounted:
| Type | Description |
|------|-------------|
| `bind.rs` | Bind mounts |
| `block_dev.rs` | Block device mounts |
| `cifs.rs` | CIFS/SMB network shares |
| `ecryptfs.rs` | Encrypted filesystem |
| `efivarfs.rs` | EFI variables |
| `httpdirfs.rs` | HTTP directory as filesystem |
| `idmapped.rs` | ID-mapped mounts |
| `label.rs` | Mount by label |
| `loop_dev.rs` | Loop device mounts |
| `overlayfs.rs` | Overlay filesystem |
## Other Useful Utilities
### `Apply` / `ApplyRef` traits
Fluent method chaining:
```rust
use crate::util::Apply;
let result = some_value
.apply(|v| transform(v))
.apply(|v| another_transform(v));
```
### `Container<T>`
Async-safe optional container:
```rust
use crate::util::Container;
let container = Container::new(None);
container.set(value).await;
let taken = container.take().await;
```
### `HashWriter<H, W>`
Write data while computing hash:
```rust
use crate::util::HashWriter;
use sha2::Sha256;
let writer = HashWriter::new(Sha256::new(), file);
// Write data...
let (hasher, file) = writer.finish();
let hash = hasher.finalize();
```
### `Never` type
Uninhabited type for impossible cases:
```rust
use crate::util::Never;
fn impossible() -> Never {
// This function can never return
}
let never: Never = impossible();
never.absurd::<String>() // Can convert to any type
```
### `MaybeOwned<'a, T>`
Either borrowed or owned data:
```rust
use crate::util::MaybeOwned;
fn accept_either(data: MaybeOwned<'_, String>) {
// Use &*data to access the value
}
accept_either(MaybeOwned::from(&existing_string));
accept_either(MaybeOwned::from(owned_string));
```
### `new_guid()`
Generate a random GUID:
```rust
use crate::util::new_guid;
let guid = new_guid(); // Returns InternedString
```

View File

@@ -1,100 +0,0 @@
# i18n Patterns in `core/`
## Library & Setup
**Crate:** [`rust-i18n`](https://crates.io/crates/rust-i18n) v3.1.5 (`core/Cargo.toml`)
**Initialization** (`core/src/lib.rs:3`):
```rust
rust_i18n::i18n!("locales", fallback = ["en_US"]);
```
This macro scans `core/locales/` at compile time and embeds all translations as constants.
**Prelude re-export** (`core/src/prelude.rs:4`):
```rust
pub use rust_i18n::t;
```
Most modules import `t!` via the prelude.
## Translation File
**Location:** `core/locales/i18n.yaml`
**Format:** YAML v2 (~755 keys)
**Supported languages:** `en_US`, `de_DE`, `es_ES`, `fr_FR`, `pl_PL`
**Entry structure:**
```yaml
namespace.sub.key-name:
en_US: "English text with %{param}"
de_DE: "German text with %{param}"
# ...
```
## Using `t!()`
```rust
// Simple key
t!("error.unknown")
// With parameter interpolation (%{name} in YAML)
t!("bins.deprecated.renamed", old = old_name, new = new_name)
```
## Key Naming Conventions
Keys use **dot-separated hierarchical namespaces** with **kebab-case** for multi-word segments:
```
<module>.<submodule>.<descriptive-name>
```
Examples:
- `error.incorrect-password` — error kind label
- `bins.start-init.updating-firmware` — startup phase message
- `backup.bulk.complete-title` — backup notification title
- `help.arg.acme-contact` — CLI help text for an argument
- `context.diagnostic.starting-diagnostic-ui` — diagnostic context status
### Top-Level Namespaces
| Namespace | Purpose |
|-----------|---------|
| `error.*` | `ErrorKind` display strings (see `src/error.rs`) |
| `bins.*` | CLI binary messages (deprecated, start-init, startd, etc.) |
| `init.*` | Initialization phase labels |
| `setup.*` | First-run setup messages |
| `context.*` | Context startup messages (diagnostic, setup, CLI) |
| `service.*` | Service lifecycle messages |
| `backup.*` | Backup/restore operation messages |
| `registry.*` | Package registry messages |
| `net.*` | Network-related messages |
| `middleware.*` | Request middleware messages (auth, etc.) |
| `disk.*` | Disk operation messages |
| `lxc.*` | Container management messages |
| `system.*` | System monitoring/metrics messages |
| `notifications.*` | User-facing notification messages |
| `update.*` | OS update messages |
| `util.*` | Utility messages (TUI, RPC) |
| `ssh.*` | SSH operation messages |
| `shutdown.*` | Shutdown-related messages |
| `logs.*` | Log-related messages |
| `auth.*` | Authentication messages |
| `help.*` | CLI help text (`help.arg.<arg-name>`) |
| `about.*` | CLI command descriptions |
## Locale Selection
`core/src/bins/mod.rs:15-36` — `set_locale_from_env()`:
1. Reads `LANG` environment variable
2. Strips `.UTF-8` suffix
3. Exact-matches against available locales, falls back to language-prefix match (e.g. `en_GB` matches `en_US`)
## Adding New Keys
1. Add the key to `core/locales/i18n.yaml` with all 5 language translations
2. Use the `t!("your.key.name")` macro in Rust code
3. Follow existing namespace conventions — match the module path where the key is used
4. Use kebab-case for multi-word segments
5. Translations are validated at compile time

View File

@@ -1,226 +0,0 @@
# rpc-toolkit
StartOS uses [rpc-toolkit](https://github.com/Start9Labs/rpc-toolkit) for its JSON-RPC API. This document covers the patterns used in this codebase.
## Overview
The API is JSON-RPC (not REST). All endpoints are RPC methods organized in a hierarchical command structure.
## Handler Functions
There are four types of handler functions, chosen based on the function's characteristics:
### `from_fn_async` - Async handlers
For standard async functions. Most handlers use this.
```rust
pub async fn my_handler(ctx: RpcContext, params: MyParams) -> Result<MyResponse, Error> {
// Can use .await
}
from_fn_async(my_handler)
```
### `from_fn_async_local` - Non-thread-safe async handlers
For async functions that are not `Send` (cannot be safely moved between threads). Use when working with non-thread-safe types.
```rust
pub async fn cli_download(ctx: CliContext, params: Params) -> Result<(), Error> {
// Non-Send async operations
}
from_fn_async_local(cli_download)
```
### `from_fn_blocking` - Sync blocking handlers
For synchronous functions that perform blocking I/O or long computations.
```rust
pub fn query_dns(ctx: RpcContext, params: DnsParams) -> Result<DnsResponse, Error> {
// Blocking operations (file I/O, DNS lookup, etc.)
}
from_fn_blocking(query_dns)
```
### `from_fn` - Sync non-blocking handlers
For pure functions or quick synchronous operations with no I/O.
```rust
pub fn echo(ctx: RpcContext, params: EchoParams) -> Result<String, Error> {
Ok(params.message)
}
from_fn(echo)
```
## ParentHandler
Groups related RPC methods into a hierarchy:
```rust
use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async};
pub fn my_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
.subcommand("list", from_fn_async(list_handler).with_call_remote::<CliContext>())
.subcommand("create", from_fn_async(create_handler).with_call_remote::<CliContext>())
}
```
## Handler Extensions
Chain methods to configure handler behavior.
**Ordering rules:**
1. `with_about()` must come AFTER other CLI modifiers (`no_display()`, `with_custom_display_fn()`, etc.)
2. `with_call_remote()` must be the LAST adapter in the chain
| Method | Purpose |
|--------|---------|
| `.with_metadata("key", Value)` | Attach metadata for middleware |
| `.no_cli()` | RPC-only, not available via CLI |
| `.no_display()` | No CLI output |
| `.with_display_serializable()` | Default JSON/YAML output for CLI |
| `.with_custom_display_fn(\|_, res\| ...)` | Custom CLI output formatting |
| `.with_about("about.description")` | Add help text (i18n key) - **after CLI modifiers** |
| `.with_call_remote::<CliContext>()` | Enable CLI to call remotely - **must be last** |
### Correct ordering example:
```rust
from_fn_async(my_handler)
.with_metadata("sync_db", Value::Bool(true)) // metadata early
.no_display() // CLI modifier
.with_about("about.my-handler") // after CLI modifiers
.with_call_remote::<CliContext>() // always last
```
## Metadata by Middleware
Metadata tags are processed by different middleware. Group them logically:
### Auth Middleware (`middleware/auth/mod.rs`)
| Metadata | Default | Description |
|----------|---------|-------------|
| `authenticated` | `true` | Whether endpoint requires authentication. Set to `false` for public endpoints. |
### Session Auth Middleware (`middleware/auth/session.rs`)
| Metadata | Default | Description |
|----------|---------|-------------|
| `login` | `false` | Special handling for login endpoints (rate limiting, cookie setting) |
| `get_session` | `false` | Inject session ID into params as `__Auth_session` |
### Signature Auth Middleware (`middleware/auth/signature.rs`)
| Metadata | Default | Description |
|----------|---------|-------------|
| `get_signer` | `false` | Inject signer public key into params as `__Auth_signer` |
### Registry Auth (extends Signature Auth)
| Metadata | Default | Description |
|----------|---------|-------------|
| `admin` | `false` | Require admin privileges (signer must be in admin list) |
| `get_device_info` | `false` | Inject device info header for hardware filtering |
### Database Middleware (`middleware/db.rs`)
| Metadata | Default | Description |
|----------|---------|-------------|
| `sync_db` | `false` | Sync database after mutation, add `X-Patch-Sequence` header |
## Context Types
Different contexts for different execution environments:
- `RpcContext` - Web/RPC requests with full service access
- `CliContext` - CLI operations, calls remote RPC
- `InitContext` - During system initialization
- `DiagnosticContext` - Diagnostic/recovery mode
- `RegistryContext` - Registry daemon context
- `EffectContext` - Service effects context (container-to-host calls)
## Parameter Structs
Parameters use derive macros for JSON-RPC, CLI parsing, and TypeScript generation:
```rust
#[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")] // JSON-RPC uses camelCase
#[command(rename_all = "kebab-case")] // CLI uses kebab-case
#[ts(export)] // Generate TypeScript types
pub struct MyParams {
pub package_id: PackageId,
}
```
### Middleware Injection
Auth middleware can inject values into params using special field names:
```rust
#[derive(Deserialize, Serialize, Parser, TS)]
pub struct MyParams {
#[ts(skip)]
#[serde(rename = "__Auth_session")] // Injected by session auth
session: InternedString,
#[ts(skip)]
#[serde(rename = "__Auth_signer")] // Injected by signature auth
signer: AnyVerifyingKey,
#[ts(skip)]
#[serde(rename = "__Auth_userAgent")] // Injected during login
user_agent: Option<String>,
}
```
## Common Patterns
### Adding a New RPC Endpoint
1. Define params struct with `Deserialize, Serialize, Parser, TS`
2. Choose handler type based on sync/async and thread-safety
3. Write handler function taking `(Context, Params) -> Result<Response, Error>`
4. Add to parent handler with appropriate extensions (display modifiers before `with_about`)
5. TypeScript types auto-generated via `make ts-bindings`
### Public (Unauthenticated) Endpoint
```rust
from_fn_async(get_info)
.with_metadata("authenticated", Value::Bool(false))
.with_display_serializable()
.with_about("about.get-info")
.with_call_remote::<CliContext>() // last
```
### Mutating Endpoint with DB Sync
```rust
from_fn_async(update_config)
.with_metadata("sync_db", Value::Bool(true))
.no_display()
.with_about("about.update-config")
.with_call_remote::<CliContext>() // last
```
### Session-Aware Endpoint
```rust
from_fn_async(logout)
.with_metadata("get_session", Value::Bool(true))
.no_display()
.with_about("about.logout")
.with_call_remote::<CliContext>() // last
```
## File Locations
- Handler definitions: Throughout `core/src/` modules
- Main API tree: `core/src/lib.rs` (`main_api()`, `server()`, `package()`)
- Auth middleware: `core/src/middleware/auth/`
- DB middleware: `core/src/middleware/db.rs`
- Context types: `core/src/context/`

View File

@@ -1,122 +0,0 @@
# S9PK Package Format
S9PK is the package format for StartOS services. Version 2 uses a merkle archive structure for efficient downloading and cryptographic verification.
## File Format
S9PK files begin with a 3-byte header: `0x3b 0x3b 0x02` (magic bytes + version 2).
The archive is cryptographically signed using Ed25519 with prehashed content (SHA-512 over blake3 merkle root hash).
## Archive Structure
```
/
├── manifest.json # Package metadata (required)
├── icon.<ext> # Package icon - any image/* format (required)
├── LICENSE.md # License text (required)
├── dependencies/ # Dependency metadata (optional)
│ └── <package-id>/
│ ├── metadata.json # DependencyMetadata
│ └── icon.<ext> # Dependency icon
├── javascript.squashfs # Package JavaScript code (required)
├── assets.squashfs # Static assets (optional, legacy: assets/ directory)
└── images/ # Container images by architecture
└── <arch>/ # e.g., x86_64, aarch64, riscv64
├── <image-id>.squashfs # Container filesystem
├── <image-id>.json # Image metadata
└── <image-id>.env # Environment variables
```
## Components
### manifest.json
The package manifest contains all metadata:
| Field | Type | Description |
|-------|------|-------------|
| `id` | string | Package identifier (e.g., `bitcoind`) |
| `title` | string | Display name |
| `version` | string | Extended version string |
| `satisfies` | string[] | Version ranges this version satisfies |
| `releaseNotes` | string/object | Release notes (localized) |
| `canMigrateTo` | string | Version range for forward migration |
| `canMigrateFrom` | string | Version range for backward migration |
| `license` | string | License type |
| `wrapperRepo` | string | StartOS wrapper repository URL |
| `upstreamRepo` | string | Upstream project URL |
| `supportSite` | string | Support site URL |
| `marketingSite` | string | Marketing site URL |
| `donationUrl` | string? | Optional donation URL |
| `docsUrl` | string? | Optional documentation URL |
| `description` | object | Short and long descriptions (localized) |
| `images` | object | Image configurations by image ID |
| `volumes` | string[] | Volume IDs for persistent data |
| `alerts` | object | User alerts for lifecycle events |
| `dependencies` | object | Package dependencies |
| `hardwareRequirements` | object | Hardware requirements (arch, RAM, devices) |
| `hardwareAcceleration` | boolean | Whether package uses hardware acceleration |
| `gitHash` | string? | Git commit hash |
| `osVersion` | string | Minimum StartOS version |
| `sdkVersion` | string? | SDK version used to build |
### javascript.squashfs
Contains the package JavaScript that implements the `ABI` interface from `@start9labs/start-sdk-base`. This code runs in the container runtime and manages the package lifecycle.
The squashfs is mounted at `/usr/lib/startos/package/` and the runtime loads `index.js`.
### images/
Container images organized by architecture:
- **`<image-id>.squashfs`** - Container root filesystem
- **`<image-id>.json`** - Image metadata (entrypoint, user, workdir, etc.)
- **`<image-id>.env`** - Environment variables for the container
Images are built from Docker/Podman and converted to squashfs. The `ImageConfig` in manifest specifies:
- `arch` - Supported architectures
- `emulateMissingAs` - Fallback architecture for emulation
- `nvidiaContainer` - Whether to enable NVIDIA container support
### assets.squashfs
Static assets accessible to the package, mounted read-only at `/media/startos/assets/` in the container.
### dependencies/
Metadata for dependencies displayed in the UI:
- `metadata.json` - Just title for now
- `icon.<ext>` - Icon for the dependency
## Merkle Archive
The S9PK uses a merkle tree structure where each file and directory has a blake3 hash. This enables:
1. **Partial downloads** - Download and verify individual files
2. **Integrity verification** - Verify any subset of the archive
3. **Efficient updates** - Only download changed portions
4. **DOS protection** - Size limits enforced before downloading content
Files are sorted by priority for streaming (manifest first, then icon, license, dependencies, javascript, assets, images).
## Building S9PK
Use `start-cli s9pk pack` to build packages:
```bash
start-cli s9pk pack <manifest-path> -o <output.s9pk>
```
Images can be sourced from:
- Docker/Podman build (`--docker-build`)
- Existing Docker tag (`--docker-tag`)
- Pre-built squashfs files
## Related Code
- `core/src/s9pk/v2/mod.rs` - S9pk struct and serialization
- `core/src/s9pk/v2/manifest.rs` - Manifest types
- `core/src/s9pk/v2/pack.rs` - Packing logic
- `core/src/s9pk/merkle_archive/` - Merkle archive implementation

View File

@@ -46,7 +46,6 @@ openssh-server
podman podman
psmisc psmisc
qemu-guest-agent qemu-guest-agent
qemu-user-static
rfkill rfkill
rsync rsync
samba-common-bin samba-common-bin

View File

@@ -111,6 +111,6 @@ if [ "$CHROOT_RES" -eq 0 ]; then
reboot reboot
fi fi
umount /media/startos/next umount -R /media/startos/next
umount /media/startos/upper umount /media/startos/upper
rm -rf /media/startos/upper /media/startos/next rm -rf /media/startos/upper /media/startos/next

View File

@@ -15,12 +15,13 @@ if [ "$SKIP_DL" != "1" ]; then
fi fi
if [ -n "$RUN_ID" ]; then if [ -n "$RUN_ID" ]; then
for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do for arch in aarch64 aarch64-nonfree riscv64 riscv64-nonfree x86_64 x86_64-nonfree raspberrypi; do
while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.squashfs -D $(pwd); do sleep 1; done while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.squashfs -D $(pwd); do sleep 1; done
done done
for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do for arch in aarch64 aarch64-nonfree riscv64 riscv64-nonfree x86_64 x86_64-nonfree; do
while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.iso -D $(pwd); do sleep 1; done while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.iso -D $(pwd); do sleep 1; done
done done
while ! gh run download -R Start9Labs/start-os $RUN_ID -n raspberrypi.img -D $(pwd); do sleep 1; done
fi fi
if [ -n "$ST_RUN_ID" ]; then if [ -n "$ST_RUN_ID" ]; then
@@ -56,23 +57,31 @@ start-cli --registry=https://alpha-registry-x.start9.com registry os version add
if [ "$SKIP_UL" = "2" ]; then if [ "$SKIP_UL" = "2" ]; then
exit 2 exit 2
elif [ "$SKIP_UL" != "1" ]; then elif [ "$SKIP_UL" != "1" ]; then
for file in *.deb start-cli_*; do for file in *.squashfs *.iso *.deb start-cli_*; do
gh release upload -R Start9Labs/start-os v$VERSION $file gh release upload -R Start9Labs/start-os v$VERSION $file
done done
for file in *.iso *.squashfs; do for file in *.img; do
s3cmd put -P $file s3://startos-images/v$VERSION/$file if ! [ -f $file.gz ]; then
cat $file | pigz > $file.gz
fi
gh release upload -R Start9Labs/start-os v$VERSION $file.gz
done done
fi fi
if [ "$SKIP_INDEX" != "1" ]; then if [ "$SKIP_INDEX" != "1" ]; then
for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do for arch in aarch64 aarch64-nonfree riscv64 riscv64-nonfree x86_64 x86_64-nonfree; do
for file in *_$arch.squashfs *_$arch.iso; do for file in *_$arch.squashfs *_$arch.iso; do
start-cli --registry=https://alpha-registry-x.start9.com registry os asset add --platform=$arch --version=$VERSION $file https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$file start-cli --registry=https://alpha-registry-x.start9.com registry os asset add --platform=$arch --version=$VERSION $file https://github.com/Start9Labs/start-os/releases/download/v$VERSION/$(echo -n "$file" | sed 's/~/./g')
done
done
for arch in raspberrypi; do
for file in *_$arch.squashfs; do
start-cli --registry=https://alpha-registry-x.start9.com registry os asset add --platform=$arch --version=$VERSION $file https://github.com/Start9Labs/start-os/releases/download/v$VERSION/$(echo -n "$file" | sed 's/~/./g')
done done
done done
fi fi
for file in *.iso *.squashfs *.deb start-cli_*; do for file in *.iso *.img *.img.gz *.squashfs *.deb start-cli_*; do
gpg -u 7CFFDA41CA66056A --detach-sign --armor -o "${file}.asc" "$file" gpg -u 7CFFDA41CA66056A --detach-sign --armor -o "${file}.asc" "$file"
done done
@@ -81,30 +90,20 @@ tar -czvf signatures.tar.gz *.asc
gh release upload -R Start9Labs/start-os v$VERSION signatures.tar.gz gh release upload -R Start9Labs/start-os v$VERSION signatures.tar.gz
cat << EOF
# ISO Downloads
- [x86_64/AMD64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64-nonfree.iso))
- [x86_64/AMD64-slim (FOSS-only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers")
- [aarch64/ARM64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64-nonfree.iso))
- [aarch64/ARM64-slim (FOSS-Only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers")
- [RISCV64 (RVA23)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_riscv64.iso))
EOF
cat << 'EOF' cat << 'EOF'
# StartOS Checksums # StartOS Checksums
## SHA-256 ## SHA-256
``` ```
EOF EOF
sha256sum *.iso *.squashfs sha256sum *.iso *.img *.img.gz *.squashfs
cat << 'EOF' cat << 'EOF'
``` ```
## BLAKE-3 ## BLAKE-3
``` ```
EOF EOF
b3sum *.iso *.squashfs b3sum *.iso *.img *.img.gz *.squashfs
cat << 'EOF' cat << 'EOF'
``` ```
@@ -139,4 +138,5 @@ EOF
b3sum start-cli_* b3sum start-cli_*
cat << 'EOF' cat << 'EOF'
``` ```
EOF EOF

View File

@@ -1,21 +1,16 @@
# Container RPC Server Specification # Container RPC SERVER Specification
The container runtime exposes a JSON-RPC server over a Unix socket at `/media/startos/rpc/service.sock`.
## Methods ## Methods
### init ### init
Initialize the runtime and system. initialize runtime (mount `/proc`, `/sys`, `/dev`, and `/run` to each image in `/media/images`)
#### params called after os has mounted js and images to the container
```ts #### args
{
id: string, `[]`
kind: "install" | "update" | "restore" | null,
}
```
#### response #### response
@@ -23,16 +18,11 @@ Initialize the runtime and system.
### exit ### exit
Shutdown runtime and optionally run exit hooks for a target version. shutdown runtime
#### params #### args
```ts `[]`
{
id: string,
target: string | null, // ExtendedVersion or VersionRange
}
```
#### response #### response
@@ -40,11 +30,11 @@ Shutdown runtime and optionally run exit hooks for a target version.
### start ### start
Run main method if not already running. run main method if not already running
#### params #### args
None `[]`
#### response #### response
@@ -52,11 +42,11 @@ None
### stop ### stop
Stop main method by sending SIGTERM to child processes, and SIGKILL after timeout. stop main method by sending SIGTERM to child processes, and SIGKILL after timeout
#### params #### args
None `{ timeout: millis }`
#### response #### response
@@ -64,16 +54,15 @@ None
### execute ### execute
Run a specific package procedure. run a specific package procedure
#### params #### args
```ts ```ts
{ {
id: string, // event ID procedure: JsonPath,
procedure: string, // JSON path (e.g., "/backup/create", "/actions/{name}/run") input: any,
input: any, timeout: millis,
timeout: number | null,
} }
``` ```
@@ -83,64 +72,18 @@ Run a specific package procedure.
### sandbox ### sandbox
Run a specific package procedure in sandbox mode. Same interface as `execute`. run a specific package procedure in sandbox mode
UNIMPLEMENTED: this feature is planned but does not exist #### args
#### params
```ts ```ts
{ {
id: string, procedure: JsonPath,
procedure: string, input: any,
input: any, timeout: millis,
timeout: number | null,
} }
``` ```
#### response #### response
`any` `any`
### callback
Handle a callback from an effect.
#### params
```ts
{
id: number,
args: any[],
}
```
#### response
`null` (no response sent)
### eval
Evaluate a script in the runtime context. Used for debugging.
#### params
```ts
{
script: string,
}
```
#### response
`any`
## Procedures
The `execute` and `sandbox` methods route to procedures based on the `procedure` path:
| Procedure | Description |
|-----------|-------------|
| `/backup/create` | Create a backup |
| `/actions/{name}/getInput` | Get input spec for an action |
| `/actions/{name}/run` | Run an action with input |

2
core/Cargo.lock generated
View File

@@ -7817,7 +7817,7 @@ dependencies = [
[[package]] [[package]]
name = "start-os" name = "start-os"
version = "0.4.0-alpha.19" version = "0.4.0-alpha.18"
dependencies = [ dependencies = [
"aes 0.7.5", "aes 0.7.5",
"arti-client", "arti-client",

View File

@@ -15,7 +15,7 @@ license = "MIT"
name = "start-os" name = "start-os"
readme = "README.md" readme = "README.md"
repository = "https://github.com/Start9Labs/start-os" repository = "https://github.com/Start9Labs/start-os"
version = "0.4.0-alpha.19" # VERSION_BUMP version = "0.4.0-alpha.18" # VERSION_BUMP
[lib] [lib]
name = "startos" name = "startos"
@@ -176,7 +176,6 @@ mio = "1"
new_mime_guess = "4" new_mime_guess = "4"
nix = { version = "0.30.1", features = [ nix = { version = "0.30.1", features = [
"fs", "fs",
"hostname",
"mount", "mount",
"net", "net",
"process", "process",

View File

@@ -1843,18 +1843,18 @@ service.mod.failed-to-parse-package-data-entry:
pl_PL: "Nie udało się przeanalizować PackageDataEntry, znaleziono: %{error}" pl_PL: "Nie udało się przeanalizować PackageDataEntry, znaleziono: %{error}"
service.mod.no-matching-subcontainers: service.mod.no-matching-subcontainers:
en_US: "no matching subcontainers are running for %{id}; some possible choices are:" en_US: "no matching subcontainers are running for %{id}; some possible choices are:\n%{subcontainers}"
de_DE: "keine passenden Subcontainer laufen für %{id}; einige mögliche Optionen sind:" de_DE: "keine passenden Subcontainer laufen für %{id}; einige mögliche Optionen sind:\n%{subcontainers}"
es_ES: "no hay subcontenedores coincidentes ejecutándose para %{id}; algunas opciones posibles son:" es_ES: "no hay subcontenedores coincidentes ejecutándose para %{id}; algunas opciones posibles son:\n%{subcontainers}"
fr_FR: "aucun sous-conteneur correspondant n'est en cours d'exécution pour %{id} ; voici quelques choix possibles :" fr_FR: "aucun sous-conteneur correspondant n'est en cours d'exécution pour %{id} ; voici quelques choix possibles :\n%{subcontainers}"
pl_PL: "nie działają pasujące podkontenery dla %{id}; niektóre możliwe wybory to:" pl_PL: "nie działają pasujące podkontenery dla %{id}; niektóre możliwe wybory to:\n%{subcontainers}"
service.mod.multiple-subcontainers-found: service.mod.multiple-subcontainers-found:
en_US: "multiple subcontainers found for %{id}" en_US: "multiple subcontainers found for %{id}: \n%{subcontainer_ids}"
de_DE: "mehrere Subcontainer für %{id} gefunden" de_DE: "mehrere Subcontainer für %{id} gefunden: \n%{subcontainer_ids}"
es_ES: "se encontraron múltiples subcontenedores para %{id}" es_ES: "se encontraron múltiples subcontenedores para %{id}: \n%{subcontainer_ids}"
fr_FR: "plusieurs sous-conteneurs trouvés pour %{id}" fr_FR: "plusieurs sous-conteneurs trouvés pour %{id} : \n%{subcontainer_ids}"
pl_PL: "znaleziono wiele podkontenerów dla %{id}" pl_PL: "znaleziono wiele podkontenerów dla %{id}: \n%{subcontainer_ids}"
service.mod.invalid-byte-length-for-signal: service.mod.invalid-byte-length-for-signal:
en_US: "invalid byte length for signal: %{length}" en_US: "invalid byte length for signal: %{length}"
@@ -3703,20 +3703,6 @@ help.arg.wireguard-config:
fr_FR: "Configuration WireGuard" fr_FR: "Configuration WireGuard"
pl_PL: "Konfiguracja WireGuard" pl_PL: "Konfiguracja WireGuard"
help.s9pk-s3base:
en_US: "Base URL for publishing s9pks"
de_DE: "Basis-URL für die Veröffentlichung von s9pks"
es_ES: "URL base para publicar s9pks"
fr_FR: "URL de base pour publier les s9pks"
pl_PL: "Bazowy URL do publikowania s9pks"
help.s9pk-s3bucket:
en_US: "S3 bucket to publish s9pks to (should correspond to s3base)"
de_DE: "S3-Bucket zum Veröffentlichen von s9pks (sollte mit s3base übereinstimmen)"
es_ES: "Bucket S3 para publicar s9pks (debe corresponder con s3base)"
fr_FR: "Bucket S3 pour publier les s9pks (doit correspondre à s3base)"
pl_PL: "Bucket S3 do publikowania s9pks (powinien odpowiadać s3base)"
# CLI command descriptions (about.*) # CLI command descriptions (about.*)
about.add-address-to-host: about.add-address-to-host:
en_US: "Add an address to this host" en_US: "Add an address to this host"
@@ -4880,13 +4866,6 @@ about.persist-new-notification:
fr_FR: "Persister une nouvelle notification" fr_FR: "Persister une nouvelle notification"
pl_PL: "Utrwal nowe powiadomienie" pl_PL: "Utrwal nowe powiadomienie"
about.publish-s9pk:
en_US: "Publish s9pk to S3 bucket and index on registry"
de_DE: "S9pk in S3-Bucket veröffentlichen und in Registry indizieren"
es_ES: "Publicar s9pk en bucket S3 e indexar en el registro"
fr_FR: "Publier s9pk dans le bucket S3 et indexer dans le registre"
pl_PL: "Opublikuj s9pk do bucketu S3 i zindeksuj w rejestrze"
about.rebuild-service-container: about.rebuild-service-container:
en_US: "Rebuild service container" en_US: "Rebuild service container"
de_DE: "Dienst-Container neu erstellen" de_DE: "Dienst-Container neu erstellen"

View File

@@ -180,13 +180,7 @@ pub async fn update(
.as_idx_mut(&id) .as_idx_mut(&id)
.ok_or_else(|| { .ok_or_else(|| {
Error::new( Error::new(
eyre!( eyre!("{}", t!("backup.target.cifs.target-not-found", id = BackupTargetId::Cifs { id })),
"{}",
t!(
"backup.target.cifs.target-not-found",
id = BackupTargetId::Cifs { id }
)
),
ErrorKind::NotFound, ErrorKind::NotFound,
) )
})? })?

View File

@@ -1,7 +1,10 @@
use rust_i18n::t; use rust_i18n::t;
pub fn renamed(old: &str, new: &str) -> ! { pub fn renamed(old: &str, new: &str) -> ! {
eprintln!("{}", t!("bins.deprecated.renamed", old = old, new = new)); eprintln!(
"{}",
t!("bins.deprecated.renamed", old = old, new = new)
);
std::process::exit(1) std::process::exit(1)
} }

View File

@@ -4,8 +4,8 @@ use std::time::Duration;
use clap::Parser; use clap::Parser;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use futures::{FutureExt, TryFutureExt};
use rust_i18n::t; use rust_i18n::t;
use futures::{FutureExt, TryFutureExt};
use tokio::signal::unix::signal; use tokio::signal::unix::signal;
use tracing::instrument; use tracing::instrument;

View File

@@ -38,8 +38,6 @@ pub struct CliContextSeed {
pub registry_url: Option<Url>, pub registry_url: Option<Url>,
pub registry_hostname: Vec<InternedString>, pub registry_hostname: Vec<InternedString>,
pub registry_listen: Option<SocketAddr>, pub registry_listen: Option<SocketAddr>,
pub s9pk_s3base: Option<Url>,
pub s9pk_s3bucket: Option<InternedString>,
pub tunnel_addr: Option<SocketAddr>, pub tunnel_addr: Option<SocketAddr>,
pub tunnel_listen: Option<SocketAddr>, pub tunnel_listen: Option<SocketAddr>,
pub client: Client, pub client: Client,
@@ -131,8 +129,6 @@ impl CliContext {
.transpose()?, .transpose()?,
registry_hostname: config.registry_hostname.unwrap_or_default(), registry_hostname: config.registry_hostname.unwrap_or_default(),
registry_listen: config.registry_listen, registry_listen: config.registry_listen,
s9pk_s3base: config.s9pk_s3base,
s9pk_s3bucket: config.s9pk_s3bucket,
tunnel_addr: config.tunnel, tunnel_addr: config.tunnel,
tunnel_listen: config.tunnel_listen, tunnel_listen: config.tunnel_listen,
client: { client: {
@@ -164,23 +160,21 @@ impl CliContext {
if !path.exists() { if !path.exists() {
continue; continue;
} }
let pair = let pair = <ed25519::KeypairBytes as ed25519::pkcs8::DecodePrivateKey>::from_pkcs8_pem(
<ed25519::KeypairBytes as ed25519::pkcs8::DecodePrivateKey>::from_pkcs8_pem( &std::fs::read_to_string(path)?,
&std::fs::read_to_string(path)?, )
.with_kind(crate::ErrorKind::Pem)?;
let secret = ed25519_dalek::SecretKey::try_from(&pair.secret_key[..]).map_err(|_| {
Error::new(
eyre!("{}", t!("context.cli.pkcs8-key-incorrect-length")),
ErrorKind::OpenSsl,
) )
.with_kind(crate::ErrorKind::Pem)?; })?;
let secret = return Ok(secret.into())
ed25519_dalek::SecretKey::try_from(&pair.secret_key[..]).map_err(|_| {
Error::new(
eyre!("{}", t!("context.cli.pkcs8-key-incorrect-length")),
ErrorKind::OpenSsl,
)
})?;
return Ok(secret.into());
} }
Err(Error::new( Err(Error::new(
eyre!("{}", t!("context.cli.developer-key-does-not-exist")), eyre!("{}", t!("context.cli.developer-key-does-not-exist")),
crate::ErrorKind::Uninitialized, crate::ErrorKind::Uninitialized
)) ))
}) })
} }
@@ -201,12 +195,8 @@ impl CliContext {
.into()); .into());
} }
}; };
url.set_scheme(ws_scheme).map_err(|_| { url.set_scheme(ws_scheme)
Error::new( .map_err(|_| Error::new(eyre!("{}", t!("context.cli.cannot-set-url-scheme")), crate::ErrorKind::ParseUrl))?;
eyre!("{}", t!("context.cli.cannot-set-url-scheme")),
crate::ErrorKind::ParseUrl,
)
})?;
url.path_segments_mut() url.path_segments_mut()
.map_err(|_| eyre!("Url cannot be base")) .map_err(|_| eyre!("Url cannot be base"))
.with_kind(crate::ErrorKind::ParseUrl)? .with_kind(crate::ErrorKind::ParseUrl)?

View File

@@ -68,10 +68,6 @@ pub struct ClientConfig {
pub registry_hostname: Option<Vec<InternedString>>, pub registry_hostname: Option<Vec<InternedString>>,
#[arg(skip)] #[arg(skip)]
pub registry_listen: Option<SocketAddr>, pub registry_listen: Option<SocketAddr>,
#[arg(long, help = "help.s9pk-s3base")]
pub s9pk_s3base: Option<Url>,
#[arg(long, help = "help.s9pk-s3bucket")]
pub s9pk_s3bucket: Option<InternedString>,
#[arg(short = 't', long, help = "help.arg.tunnel-address")] #[arg(short = 't', long, help = "help.arg.tunnel-address")]
pub tunnel: Option<SocketAddr>, pub tunnel: Option<SocketAddr>,
#[arg(skip)] #[arg(skip)]
@@ -93,13 +89,8 @@ impl ContextConfig for ClientConfig {
self.host = self.host.take().or(other.host); self.host = self.host.take().or(other.host);
self.registry = self.registry.take().or(other.registry); self.registry = self.registry.take().or(other.registry);
self.registry_hostname = self.registry_hostname.take().or(other.registry_hostname); self.registry_hostname = self.registry_hostname.take().or(other.registry_hostname);
self.registry_listen = self.registry_listen.take().or(other.registry_listen);
self.s9pk_s3base = self.s9pk_s3base.take().or(other.s9pk_s3base);
self.s9pk_s3bucket = self.s9pk_s3bucket.take().or(other.s9pk_s3bucket);
self.tunnel = self.tunnel.take().or(other.tunnel); self.tunnel = self.tunnel.take().or(other.tunnel);
self.tunnel_listen = self.tunnel_listen.take().or(other.tunnel_listen);
self.proxy = self.proxy.take().or(other.proxy); self.proxy = self.proxy.take().or(other.proxy);
self.socks_listen = self.socks_listen.take().or(other.socks_listen);
self.cookie_path = self.cookie_path.take().or(other.cookie_path); self.cookie_path = self.cookie_path.take().or(other.cookie_path);
self.developer_key_path = self.developer_key_path.take().or(other.developer_key_path); self.developer_key_path = self.developer_key_path.take().or(other.developer_key_path);
} }

View File

@@ -27,10 +27,7 @@ impl DiagnosticContext {
disk_guid: Option<InternedString>, disk_guid: Option<InternedString>,
error: Error, error: Error,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
tracing::error!( tracing::error!("{}", t!("context.diagnostic.starting-diagnostic-ui", error = error));
"{}",
t!("context.diagnostic.starting-diagnostic-ui", error = error)
);
tracing::debug!("{:?}", error); tracing::debug!("{:?}", error);
let (shutdown, _) = tokio::sync::broadcast::channel(1); let (shutdown, _) = tokio::sync::broadcast::channel(1);

View File

@@ -463,10 +463,7 @@ impl RpcContext {
.await .await
.result .result
{ {
tracing::error!( tracing::error!("{}", t!("context.rpc.error-in-session-cleanup-cron", error = e));
"{}",
t!("context.rpc.error-in-session-cleanup-cron", error = e)
);
tracing::debug!("{e:?}"); tracing::debug!("{e:?}");
} }
} }
@@ -579,7 +576,6 @@ impl RpcContext {
pub async fn call_remote<RemoteContext>( pub async fn call_remote<RemoteContext>(
&self, &self,
method: &str, method: &str,
metadata: OrdMap<&'static str, Value>,
params: Value, params: Value,
) -> Result<Value, RpcError> ) -> Result<Value, RpcError>
where where
@@ -588,7 +584,7 @@ impl RpcContext {
<Self as CallRemote<RemoteContext, Empty>>::call_remote( <Self as CallRemote<RemoteContext, Empty>>::call_remote(
&self, &self,
method, method,
metadata, OrdMap::new(),
params, params,
Empty {}, Empty {},
) )
@@ -597,15 +593,20 @@ impl RpcContext {
pub async fn call_remote_with<RemoteContext, T>( pub async fn call_remote_with<RemoteContext, T>(
&self, &self,
method: &str, method: &str,
metadata: OrdMap<&'static str, Value>,
params: Value, params: Value,
extra: T, extra: T,
) -> Result<Value, RpcError> ) -> Result<Value, RpcError>
where where
Self: CallRemote<RemoteContext, T>, Self: CallRemote<RemoteContext, T>,
{ {
<Self as CallRemote<RemoteContext, T>>::call_remote(&self, method, metadata, params, extra) <Self as CallRemote<RemoteContext, T>>::call_remote(
.await &self,
method,
OrdMap::new(),
params,
extra,
)
.await
} }
} }
impl AsRef<Client> for RpcContext { impl AsRef<Client> for RpcContext {

View File

@@ -87,11 +87,7 @@ pub enum RevisionsRes {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")]
pub struct CliDumpParams { pub struct CliDumpParams {
#[arg( #[arg(long = "include-private", short = 'p', help = "help.arg.include-private-data")]
long = "include-private",
short = 'p',
help = "help.arg.include-private-data"
)]
#[serde(default)] #[serde(default)]
include_private: bool, include_private: bool,
#[arg(help = "help.arg.db-path")] #[arg(help = "help.arg.db-path")]

View File

@@ -70,20 +70,12 @@ async fn e2fsck_runner(
if code & 4 != 0 { if code & 4 != 0 {
tracing::error!( tracing::error!(
"{}", "{}",
t!( t!("disk.fsck.errors-not-corrected", device = logicalname.as_ref().display(), stderr = e2fsck_stderr),
"disk.fsck.errors-not-corrected",
device = logicalname.as_ref().display(),
stderr = e2fsck_stderr
),
); );
} else if code & 1 != 0 { } else if code & 1 != 0 {
tracing::warn!( tracing::warn!(
"{}", "{}",
t!( t!("disk.fsck.errors-corrected", device = logicalname.as_ref().display(), stderr = e2fsck_stderr),
"disk.fsck.errors-corrected",
device = logicalname.as_ref().display(),
stderr = e2fsck_stderr
),
); );
} }
if code < 8 { if code < 8 {

View File

@@ -29,31 +29,25 @@ impl Default for FileType {
pub struct Bind<Src: AsRef<Path>> { pub struct Bind<Src: AsRef<Path>> {
src: Src, src: Src,
filetype: FileType, filetype: FileType,
recursive: bool,
} }
impl<Src: AsRef<Path>> Bind<Src> { impl<Src: AsRef<Path>> Bind<Src> {
pub fn new(src: Src) -> Self { pub fn new(src: Src) -> Self {
Self { Self {
src, src,
filetype: FileType::Directory, filetype: FileType::Directory,
recursive: false,
} }
} }
pub fn with_type(mut self, filetype: FileType) -> Self { pub fn with_type(mut self, filetype: FileType) -> Self {
self.filetype = filetype; self.filetype = filetype;
self self
} }
pub fn recursive(mut self, recursive: bool) -> Self {
self.recursive = recursive;
self
}
} }
impl<Src: AsRef<Path> + Send + Sync> FileSystem for Bind<Src> { impl<Src: AsRef<Path> + Send + Sync> FileSystem for Bind<Src> {
async fn source(&self) -> Result<Option<impl AsRef<Path>>, Error> { async fn source(&self) -> Result<Option<impl AsRef<Path>>, Error> {
Ok(Some(&self.src)) Ok(Some(&self.src))
} }
fn extra_args(&self) -> impl IntoIterator<Item = impl AsRef<std::ffi::OsStr>> { fn extra_args(&self) -> impl IntoIterator<Item = impl AsRef<std::ffi::OsStr>> {
[if self.recursive { "--rbind" } else { "--bind" }] ["--bind"]
} }
async fn pre_mount(&self, mountpoint: &Path, mount_type: MountType) -> Result<(), Error> { async fn pre_mount(&self, mountpoint: &Path, mount_type: MountType) -> Result<(), Error> {
let from_meta = tokio::fs::metadata(&self.src).await.ok(); let from_meta = tokio::fs::metadata(&self.src).await.ok();

View File

@@ -24,11 +24,7 @@ pub async fn bind<P0: AsRef<Path>, P1: AsRef<Path>>(
) -> Result<(), Error> { ) -> Result<(), Error> {
tracing::info!( tracing::info!(
"{}", "{}",
t!( t!("disk.mount.binding", src = src.as_ref().display(), dst = dst.as_ref().display())
"disk.mount.binding",
src = src.as_ref().display(),
dst = dst.as_ref().display()
)
); );
if is_mountpoint(&dst).await? { if is_mountpoint(&dst).await? {
unmount(dst.as_ref(), true).await?; unmount(dst.as_ref(), true).await?;

View File

@@ -4,7 +4,7 @@ use axum::http::StatusCode;
use axum::http::uri::InvalidUri; use axum::http::uri::InvalidUri;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use num_enum::TryFromPrimitive; use num_enum::TryFromPrimitive;
use patch_db::Value; use patch_db::Revision;
use rpc_toolkit::reqwest; use rpc_toolkit::reqwest;
use rpc_toolkit::yajrc::{ use rpc_toolkit::yajrc::{
INVALID_PARAMS_ERROR, INVALID_REQUEST_ERROR, METHOD_NOT_FOUND_ERROR, PARSE_ERROR, RpcError, INVALID_PARAMS_ERROR, INVALID_REQUEST_ERROR, METHOD_NOT_FOUND_ERROR, PARSE_ERROR, RpcError,
@@ -16,7 +16,6 @@ use tokio_rustls::rustls;
use ts_rs::TS; use ts_rs::TS;
use crate::InvalidId; use crate::InvalidId;
use crate::prelude::to_value;
#[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)] #[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)]
#[repr(i32)] #[repr(i32)]
@@ -184,8 +183,7 @@ impl ErrorKind {
UpdateFailed => t!("error.update-failed"), UpdateFailed => t!("error.update-failed"),
Smtp => t!("error.smtp"), Smtp => t!("error.smtp"),
SetSysInfo => t!("error.set-sys-info"), SetSysInfo => t!("error.set-sys-info"),
} }.to_string()
.to_string()
} }
} }
impl Display for ErrorKind { impl Display for ErrorKind {
@@ -198,7 +196,7 @@ pub struct Error {
pub source: color_eyre::eyre::Error, pub source: color_eyre::eyre::Error,
pub debug: Option<color_eyre::eyre::Error>, pub debug: Option<color_eyre::eyre::Error>,
pub kind: ErrorKind, pub kind: ErrorKind,
pub info: Value, pub revision: Option<Revision>,
pub task: Option<JoinHandle<()>>, pub task: Option<JoinHandle<()>>,
} }
@@ -229,7 +227,7 @@ impl Error {
source: source.into(), source: source.into(),
debug, debug,
kind, kind,
info: Value::Null, revision: None,
task: None, task: None,
} }
} }
@@ -238,7 +236,7 @@ impl Error {
source: eyre!("{}", self.source), source: eyre!("{}", self.source),
debug: self.debug.as_ref().map(|e| eyre!("{e}")), debug: self.debug.as_ref().map(|e| eyre!("{e}")),
kind: self.kind, kind: self.kind,
info: self.info.clone(), revision: self.revision.clone(),
task: None, task: None,
} }
} }
@@ -246,10 +244,6 @@ impl Error {
self.task = Some(task); self.task = Some(task);
self self
} }
pub fn with_info(mut self, info: Value) -> Self {
self.info = info;
self
}
pub async fn wait(mut self) -> Self { pub async fn wait(mut self) -> Self {
if let Some(task) = &mut self.task { if let Some(task) = &mut self.task {
task.await.log_err(); task.await.log_err();
@@ -428,8 +422,6 @@ impl From<patch_db::value::Error> for Error {
pub struct ErrorData { pub struct ErrorData {
pub details: String, pub details: String,
pub debug: String, pub debug: String,
#[serde(default)]
pub info: Value,
} }
impl Display for ErrorData { impl Display for ErrorData {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -447,7 +439,6 @@ impl From<Error> for ErrorData {
Self { Self {
details: value.to_string(), details: value.to_string(),
debug: format!("{:?}", value), debug: format!("{:?}", value),
info: value.info,
} }
} }
} }
@@ -478,31 +469,40 @@ impl From<&RpcError> for ErrorData {
.or_else(|| d.as_str().map(|s| s.to_owned())) .or_else(|| d.as_str().map(|s| s.to_owned()))
}) })
.unwrap_or_else(|| value.message.clone().into_owned()), .unwrap_or_else(|| value.message.clone().into_owned()),
info: to_value(
&value
.data
.as_ref()
.and_then(|d| d.as_object().and_then(|d| d.get("info"))),
)
.unwrap_or_default(),
} }
} }
} }
impl From<Error> for RpcError { impl From<Error> for RpcError {
fn from(e: Error) -> Self { fn from(e: Error) -> Self {
let kind = e.kind; let mut data_object = serde_json::Map::with_capacity(3);
let data = ErrorData::from(e); data_object.insert("details".to_owned(), format!("{}", e.source).into());
RpcError { data_object.insert("debug".to_owned(), format!("{:?}", e.source).into());
code: kind as i32, data_object.insert(
message: kind.as_str().into(), "revision".to_owned(),
data: Some(match serde_json::to_value(&data) { match serde_json::to_value(&e.revision) {
Ok(a) => a, Ok(a) => a,
Err(e) => { Err(e) => {
tracing::warn!("Error serializing ErrorData object: {}", e); tracing::warn!("Error serializing revision for Error object: {}", e);
serde_json::Value::Null serde_json::Value::Null
} }
}), },
);
RpcError {
code: e.kind as i32,
message: e.kind.as_str().into(),
data: Some(
match serde_json::to_value(&ErrorData {
details: format!("{}", e.source),
debug: format!("{:?}", e.source),
}) {
Ok(a) => a,
Err(e) => {
tracing::warn!("Error serializing revision for Error object: {}", e);
serde_json::Value::Null
}
},
),
} }
} }
} }
@@ -605,7 +605,7 @@ where
kind, kind,
source, source,
debug, debug,
info: Value::Null, revision: None,
task: None, task: None,
} }
}) })

View File

@@ -131,9 +131,6 @@ pub async fn install(
let package: GetPackageResponse = from_value( let package: GetPackageResponse = from_value(
ctx.call_remote_with::<RegistryContext, _>( ctx.call_remote_with::<RegistryContext, _>(
"package.get", "package.get",
[("get_device_info", Value::Bool(true))]
.into_iter()
.collect(),
json!({ json!({
"id": id, "id": id,
"targetVersion": VersionRange::exactly(version.deref().clone()), "targetVersion": VersionRange::exactly(version.deref().clone()),

View File

@@ -540,10 +540,7 @@ pub fn package<C: Context>() -> ParentHandler<C> {
.with_about("about.execute-commands-container") .with_about("about.execute-commands-container")
.no_cli(), .no_cli(),
) )
.subcommand( .subcommand("attach", from_fn_async(service::cli_attach).no_display())
"attach",
from_fn_async_local(service::cli_attach).no_display(),
)
.subcommand( .subcommand(
"host", "host",
net::host::host_api::<C>().with_about("about.manage-network-hosts-package"), net::host::host_api::<C>().with_about("about.manage-network-hosts-package"),

View File

@@ -6,6 +6,7 @@ use std::str::FromStr;
use std::time::{Duration, UNIX_EPOCH}; use std::time::{Duration, UNIX_EPOCH};
use axum::extract::ws; use axum::extract::ws;
use crate::util::net::WebSocket;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use clap::builder::ValueParserFactory; use clap::builder::ValueParserFactory;
use clap::{Args, FromArgMatches, Parser}; use clap::{Args, FromArgMatches, Parser};
@@ -30,7 +31,6 @@ use crate::context::{CliContext, RpcContext};
use crate::error::ResultExt; use crate::error::ResultExt;
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations}; use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations};
use crate::util::net::WebSocket;
use crate::util::serde::Reversible; use crate::util::serde::Reversible;
use crate::util::{FromStrParser, Invoke}; use crate::util::{FromStrParser, Invoke};
@@ -330,22 +330,12 @@ pub struct LogsParams<Extra: FromArgMatches + Args = Empty> {
extra: Extra, extra: Extra,
#[arg(short = 'l', long = "limit", help = "help.arg.log-limit")] #[arg(short = 'l', long = "limit", help = "help.arg.log-limit")]
limit: Option<usize>, limit: Option<usize>,
#[arg( #[arg(short = 'c', long = "cursor", conflicts_with = "follow", help = "help.arg.log-cursor")]
short = 'c',
long = "cursor",
conflicts_with = "follow",
help = "help.arg.log-cursor"
)]
cursor: Option<String>, cursor: Option<String>,
#[arg(short = 'b', long = "boot", help = "help.arg.log-boot")] #[arg(short = 'b', long = "boot", help = "help.arg.log-boot")]
#[serde(default)] #[serde(default)]
boot: Option<BootIdentifier>, boot: Option<BootIdentifier>,
#[arg( #[arg(short = 'B', long = "before", conflicts_with = "follow", help = "help.arg.log-before")]
short = 'B',
long = "before",
conflicts_with = "follow",
help = "help.arg.log-before"
)]
#[serde(default)] #[serde(default)]
before: bool, before: bool,
} }
@@ -563,12 +553,10 @@ pub async fn journalctl(
follow_cmd.arg("--lines=0"); follow_cmd.arg("--lines=0");
} }
let mut child = follow_cmd.stdout(Stdio::piped()).spawn()?; let mut child = follow_cmd.stdout(Stdio::piped()).spawn()?;
let out = BufReader::new(child.stdout.take().ok_or_else(|| { let out =
Error::new( BufReader::new(child.stdout.take().ok_or_else(|| {
eyre!("{}", t!("logs.no-stdout-available")), Error::new(eyre!("{}", t!("logs.no-stdout-available")), crate::ErrorKind::Journald)
crate::ErrorKind::Journald, })?);
)
})?);
let journalctl_entries = LinesStream::new(out.lines()); let journalctl_entries = LinesStream::new(out.lines());
@@ -713,10 +701,7 @@ pub async fn follow_logs<Context: AsRef<RpcContinuations>>(
RpcContinuation::ws( RpcContinuation::ws(
move |socket| async move { move |socket| async move {
if let Err(e) = ws_handler(first_entry, stream, socket).await { if let Err(e) = ws_handler(first_entry, stream, socket).await {
tracing::error!( tracing::error!("{}", t!("logs.error-in-log-stream", error = e.to_string()));
"{}",
t!("logs.error-in-log-stream", error = e.to_string())
);
} }
}, },
Duration::from_secs(30), Duration::from_secs(30),

View File

@@ -40,10 +40,7 @@ impl LocalAuthContext for RpcContext {
} }
fn unauthorized() -> Error { fn unauthorized() -> Error {
Error::new( Error::new(eyre!("{}", t!("middleware.auth.unauthorized")), crate::ErrorKind::Authorization)
eyre!("{}", t!("middleware.auth.unauthorized")),
crate::ErrorKind::Authorization,
)
} }
async fn check_from_header<C: LocalAuthContext>(header: Option<&HeaderValue>) -> Result<(), Error> { async fn check_from_header<C: LocalAuthContext>(header: Option<&HeaderValue>) -> Result<(), Error> {

View File

@@ -244,10 +244,7 @@ impl ValidSessionToken {
C::access_sessions(db) C::access_sessions(db)
.as_idx_mut(session_hash) .as_idx_mut(session_hash)
.ok_or_else(|| { .ok_or_else(|| {
Error::new( Error::new(eyre!("{}", t!("middleware.auth.unauthorized")), crate::ErrorKind::Authorization)
eyre!("{}", t!("middleware.auth.unauthorized")),
crate::ErrorKind::Authorization,
)
})? })?
.mutate(|s| { .mutate(|s| {
s.last_active = Utc::now(); s.last_active = Utc::now();

View File

@@ -347,10 +347,6 @@ pub async fn call_remote<Ctx: SigningContext + AsRef<Client>>(
.with_kind(ErrorKind::Deserialization)? .with_kind(ErrorKind::Deserialization)?
.result .result
} }
_ => Err(Error::new( _ => Err(Error::new(eyre!("{}", t!("middleware.auth.unknown-content-type")), ErrorKind::Network).into()),
eyre!("{}", t!("middleware.auth.unknown-content-type")),
ErrorKind::Network,
)
.into()),
} }
} }

View File

@@ -47,13 +47,7 @@ impl Middleware<RpcContext> for SyncDb {
} }
.await .await
{ {
tracing::error!( tracing::error!("{}", t!("middleware.db.error-writing-patch-sequence-header", error = e));
"{}",
t!(
"middleware.db.error-writing-patch-sequence-header",
error = e
)
);
tracing::debug!("{e:?}"); tracing::debug!("{e:?}");
} }
} }

View File

@@ -240,13 +240,7 @@ impl PortForwardController {
} }
.await .await
{ {
tracing::error!( tracing::error!("{}", t!("net.forward.error-initializing-controller", error = format!("{e:#}")));
"{}",
t!(
"net.forward.error-initializing-controller",
error = format!("{e:#}")
)
);
tracing::debug!("{e:?}"); tracing::debug!("{e:?}");
tokio::time::sleep(Duration::from_secs(5)).await; tokio::time::sleep(Duration::from_secs(5)).await;
} }

View File

@@ -171,13 +171,16 @@ where
let mut tls_handler = self.tls_handler.clone(); let mut tls_handler = self.tls_handler.clone();
let mut fut = async move { let mut fut = async move {
let res = async { let res = async {
let mut acceptor = let mut acceptor = LazyConfigAcceptor::new(
LazyConfigAcceptor::new(Acceptor::default(), BackTrackingIO::new(stream)); Acceptor::default(),
BackTrackingIO::new(stream),
);
let mut mid: tokio_rustls::StartHandshake<BackTrackingIO<AcceptStream>> = let mut mid: tokio_rustls::StartHandshake<BackTrackingIO<AcceptStream>> =
match (&mut acceptor).await { match (&mut acceptor).await {
Ok(a) => a, Ok(a) => a,
Err(e) => { Err(e) => {
let mut stream = acceptor.take_io().or_not_found("acceptor io")?; let mut stream =
acceptor.take_io().or_not_found("acceptor io")?;
let (_, buf) = stream.rewind(); let (_, buf) = stream.rewind();
if std::str::from_utf8(buf) if std::str::from_utf8(buf)
.ok() .ok()

View File

@@ -324,12 +324,7 @@ pub async fn list_keys(ctx: RpcContext) -> Result<BTreeSet<OnionAddress>, Error>
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")]
pub struct ResetParams { pub struct ResetParams {
#[arg( #[arg(name = "wipe-state", short = 'w', long = "wipe-state", help = "help.arg.wipe-tor-state")]
name = "wipe-state",
short = 'w',
long = "wipe-state",
help = "help.arg.wipe-tor-state"
)]
wipe_state: bool, wipe_state: bool,
} }

View File

@@ -351,12 +351,7 @@ pub async fn list_keys(ctx: RpcContext) -> Result<BTreeSet<OnionAddress>, Error>
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")]
pub struct ResetParams { pub struct ResetParams {
#[arg( #[arg(name = "wipe-state", short = 'w', long = "wipe-state", help = "help.arg.wipe-tor-state")]
name = "wipe-state",
short = 'w',
long = "wipe-state",
help = "help.arg.wipe-tor-state"
)]
wipe_state: bool, wipe_state: bool,
#[arg(help = "help.arg.reset-reason")] #[arg(help = "help.arg.reset-reason")]
reason: String, reason: String,

View File

@@ -94,12 +94,7 @@ impl Model<BTreeMap<Guid, SignerInfo>> {
.next() .next()
.transpose()? .transpose()?
.map(|(a, _)| a) .map(|(a, _)| a)
.ok_or_else(|| { .ok_or_else(|| Error::new(eyre!("{}", t!("registry.admin.unknown-signer")), ErrorKind::Authorization))
Error::new(
eyre!("{}", t!("registry.admin.unknown-signer")),
ErrorKind::Authorization,
)
})
} }
pub fn get_signer_info(&self, key: &AnyVerifyingKey) -> Result<(Guid, SignerInfo), Error> { pub fn get_signer_info(&self, key: &AnyVerifyingKey) -> Result<(Guid, SignerInfo), Error> {
@@ -109,12 +104,7 @@ impl Model<BTreeMap<Guid, SignerInfo>> {
.filter_ok(|(_, s)| s.keys.contains(key)) .filter_ok(|(_, s)| s.keys.contains(key))
.next() .next()
.transpose()? .transpose()?
.ok_or_else(|| { .ok_or_else(|| Error::new(eyre!("{}", t!("registry.admin.unknown-signer")), ErrorKind::Authorization))
Error::new(
eyre!("{}", t!("registry.admin.unknown-signer")),
ErrorKind::Authorization,
)
})
} }
pub fn add_signer(&mut self, signer: &SignerInfo) -> Result<Guid, Error> { pub fn add_signer(&mut self, signer: &SignerInfo) -> Result<Guid, Error> {
@@ -129,11 +119,7 @@ impl Model<BTreeMap<Guid, SignerInfo>> {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!(
"{}", "{}",
t!( t!("registry.admin.signer-already-exists", guid = guid, name = s.name)
"registry.admin.signer-already-exists",
guid = guid,
name = s.name
)
), ),
ErrorKind::InvalidRequest, ErrorKind::InvalidRequest,
)); ));

View File

@@ -44,11 +44,7 @@ const DEFAULT_REGISTRY_LISTEN: SocketAddr =
pub struct RegistryConfig { pub struct RegistryConfig {
#[arg(short = 'c', long = "config", help = "help.arg.config-file-path")] #[arg(short = 'c', long = "config", help = "help.arg.config-file-path")]
pub config: Option<PathBuf>, pub config: Option<PathBuf>,
#[arg( #[arg(short = 'l', long = "listen", help = "help.arg.registry-listen-address")]
short = 'l',
long = "listen",
help = "help.arg.registry-listen-address"
)]
pub registry_listen: Option<SocketAddr>, pub registry_listen: Option<SocketAddr>,
#[arg(short = 'H', long = "hostname", help = "help.arg.registry-hostname")] #[arg(short = 'H', long = "hostname", help = "help.arg.registry-hostname")]
pub registry_hostname: Vec<InternedString>, pub registry_hostname: Vec<InternedString>,
@@ -56,11 +52,7 @@ pub struct RegistryConfig {
pub tor_proxy: Option<Url>, pub tor_proxy: Option<Url>,
#[arg(short = 'd', long = "datadir", help = "help.arg.data-directory")] #[arg(short = 'd', long = "datadir", help = "help.arg.data-directory")]
pub datadir: Option<PathBuf>, pub datadir: Option<PathBuf>,
#[arg( #[arg(short = 'u', long = "pg-connection-url", help = "help.arg.postgres-connection-url")]
short = 'u',
long = "pg-connection-url",
help = "help.arg.postgres-connection-url"
)]
pub pg_connection_url: Option<String>, pub pg_connection_url: Option<String>,
} }
impl ContextConfig for RegistryConfig { impl ContextConfig for RegistryConfig {
@@ -203,11 +195,9 @@ impl CallRemote<RegistryContext> for CliContext {
.push("v0"); .push("v0");
url url
} else { } else {
return Err(Error::new( return Err(
eyre!("{}", t!("registry.context.registry-required")), Error::new(eyre!("{}", t!("registry.context.registry-required")), ErrorKind::InvalidRequest).into(),
ErrorKind::InvalidRequest, );
)
.into());
}; };
if let Ok(local) = cookie { if let Ok(local) = cookie {
@@ -341,10 +331,7 @@ impl SignatureAuthContext for RegistryContext {
} }
} }
Err(Error::new( Err(Error::new(eyre!("{}", t!("registry.context.unauthorized")), ErrorKind::Authorization))
eyre!("{}", t!("registry.context.unauthorized")),
ErrorKind::Authorization,
))
} }
async fn post_auth_hook( async fn post_auth_hook(
&self, &self,

View File

@@ -154,10 +154,7 @@ async fn add_asset(
})?; })?;
Ok(()) Ok(())
} else { } else {
Err(Error::new( Err(Error::new(eyre!("{}", t!("registry.os.asset.unauthorized")), ErrorKind::Authorization))
eyre!("{}", t!("registry.os.asset.unauthorized")),
ErrorKind::Authorization,
))
} }
}) })
.await .await
@@ -234,12 +231,10 @@ pub async fn cli_add_asset(
sign_phase.start(); sign_phase.start();
let blake3 = file.blake3_mmap().await?; let blake3 = file.blake3_mmap().await?;
let size = file.size().await.ok_or_else(|| { let size = file
Error::new( .size()
eyre!("{}", t!("registry.os.asset.failed-read-metadata")), .await
ErrorKind::Filesystem, .ok_or_else(|| Error::new(eyre!("{}", t!("registry.os.asset.failed-read-metadata")), ErrorKind::Filesystem))?;
)
})?;
let commitment = Blake3Commitment { let commitment = Blake3Commitment {
hash: Base64(*blake3.as_bytes()), hash: Base64(*blake3.as_bytes()),
size, size,
@@ -341,10 +336,7 @@ async fn remove_asset(
.remove(&platform)?; .remove(&platform)?;
Ok(()) Ok(())
} else { } else {
Err(Error::new( Err(Error::new(eyre!("{}", t!("registry.os.asset.unauthorized")), ErrorKind::Authorization))
eyre!("{}", t!("registry.os.asset.unauthorized")),
ErrorKind::Authorization,
))
} }
}) })
.await .await

View File

@@ -125,9 +125,17 @@ pub struct CliGetOsAssetParams {
pub version: Version, pub version: Version,
#[arg(help = "help.arg.platform")] #[arg(help = "help.arg.platform")]
pub platform: InternedString, pub platform: InternedString,
#[arg(long = "download", short = 'd', help = "help.arg.download-directory")] #[arg(
long = "download",
short = 'd',
help = "help.arg.download-directory"
)]
pub download: Option<PathBuf>, pub download: Option<PathBuf>,
#[arg(long = "reverify", short = 'r', help = "help.arg.reverify-hash")] #[arg(
long = "reverify",
short = 'r',
help = "help.arg.reverify-hash"
)]
pub reverify: bool, pub reverify: bool,
} }

View File

@@ -89,10 +89,7 @@ async fn sign_asset(
.contains(&guid) .contains(&guid)
{ {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("registry.os.asset.signer-not-authorized", guid = guid)),
"{}",
t!("registry.os.asset.signer-not-authorized", guid = guid)
),
ErrorKind::Authorization, ErrorKind::Authorization,
)); ));
} }
@@ -187,12 +184,10 @@ pub async fn cli_sign_asset(
sign_phase.start(); sign_phase.start();
let blake3 = file.blake3_mmap().await?; let blake3 = file.blake3_mmap().await?;
let size = file.size().await.ok_or_else(|| { let size = file
Error::new( .size()
eyre!("{}", t!("registry.os.asset.failed-read-metadata")), .await
ErrorKind::Filesystem, .ok_or_else(|| Error::new(eyre!("{}", t!("registry.os.asset.failed-read-metadata")), ErrorKind::Filesystem))?;
)
})?;
let commitment = Blake3Commitment { let commitment = Blake3Commitment {
hash: Base64(*blake3.as_bytes()), hash: Base64(*blake3.as_bytes()),
size, size,

View File

@@ -26,6 +26,7 @@ pub fn os_api<C: Context>() -> ParentHandler<C> {
) )
.subcommand( .subcommand(
"version", "version",
version::version_api::<C>().with_about("about.commands-add-remove-list-versions"), version::version_api::<C>()
.with_about("about.commands-add-remove-list-versions"),
) )
} }

View File

@@ -95,14 +95,7 @@ pub async fn remove_version_signer(
.mutate(|s| Ok(s.remove(&signer)))? .mutate(|s| Ok(s.remove(&signer)))?
{ {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("registry.os.version.signer-not-authorized", signer = signer, version = version)),
"{}",
t!(
"registry.os.version.signer-not-authorized",
signer = signer,
version = version
)
),
ErrorKind::NotFound, ErrorKind::NotFound,
)); ));
} }

View File

@@ -112,10 +112,7 @@ pub async fn add_package(
Ok(()) Ok(())
} else { } else {
Err(Error::new( Err(Error::new(eyre!("{}", t!("registry.package.add.unauthorized")), ErrorKind::Authorization))
eyre!("{}", t!("registry.package.add.unauthorized")),
ErrorKind::Authorization,
))
} }
}) })
.await .await
@@ -135,24 +132,20 @@ pub struct CliAddPackageParams {
} }
pub async fn cli_add_package( pub async fn cli_add_package(
ctx: CliContext, HandlerArgs {
CliAddPackageParams { context: ctx,
file, parent_method,
url, method,
no_verify, params:
}: CliAddPackageParams, CliAddPackageParams {
file,
url,
no_verify,
},
..
}: HandlerArgs<CliContext, CliAddPackageParams>,
) -> Result<(), Error> { ) -> Result<(), Error> {
let s9pk = S9pk::open(&file, None).await?; let s9pk = S9pk::open(&file, None).await?;
cli_add_package_impl(ctx, s9pk, url, no_verify).await
}
pub async fn cli_add_package_impl(
ctx: CliContext,
s9pk: S9pk,
url: Vec<Url>,
no_verify: bool,
) -> Result<(), Error> {
let manifest = s9pk.as_manifest();
let progress = FullProgressTracker::new(); let progress = FullProgressTracker::new();
let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(1)); let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(1));
@@ -174,16 +167,8 @@ pub async fn cli_add_package_impl(
Some(1), Some(1),
); );
let progress_task = progress.progress_bar_task(&format!( let progress_task =
"Adding {}@{}{} to registry...", progress.progress_bar_task(&format!("Adding {} to registry...", file.display()));
manifest.id,
manifest.version,
manifest
.hardware_requirements
.arch
.as_ref()
.map_or(String::new(), |a| format!(" ({})", a.iter().join("/")))
));
sign_phase.start(); sign_phase.start();
let commitment = s9pk.as_archive().commitment().await?; let commitment = s9pk.as_archive().commitment().await?;
@@ -200,7 +185,7 @@ pub async fn cli_add_package_impl(
index_phase.start(); index_phase.start();
ctx.call_remote::<RegistryContext>( ctx.call_remote::<RegistryContext>(
"package.add", &parent_method.into_iter().chain(method).join("."),
imbl_value::json!({ imbl_value::json!({
"urls": &url, "urls": &url,
"signature": AnySignature::Ed25519(signature), "signature": AnySignature::Ed25519(signature),
@@ -243,12 +228,8 @@ pub async fn remove_package(
}: RemovePackageParams, }: RemovePackageParams,
) -> Result<bool, Error> { ) -> Result<bool, Error> {
let peek = ctx.db.peek().await; let peek = ctx.db.peek().await;
let signer = signer.ok_or_else(|| { let signer =
Error::new( signer.ok_or_else(|| Error::new(eyre!("{}", t!("registry.package.missing-signer")), ErrorKind::InvalidRequest))?;
eyre!("{}", t!("registry.package.missing-signer")),
ErrorKind::InvalidRequest,
)
})?;
let signer_guid = peek.as_index().as_signers().get_signer(&signer)?; let signer_guid = peek.as_index().as_signers().get_signer(&signer)?;
let rev = ctx let rev = ctx
@@ -289,10 +270,7 @@ pub async fn remove_package(
} }
Ok(()) Ok(())
} else { } else {
Err(Error::new( Err(Error::new(eyre!("{}", t!("registry.package.unauthorized")), ErrorKind::Authorization))
eyre!("{}", t!("registry.package.unauthorized")),
ErrorKind::Authorization,
))
} }
}) })
.await; .await;
@@ -367,10 +345,7 @@ pub async fn add_mirror(
Ok(()) Ok(())
} else { } else {
Err(Error::new( Err(Error::new(eyre!("{}", t!("registry.package.add-mirror.unauthorized")), ErrorKind::Authorization))
eyre!("{}", t!("registry.package.add-mirror.unauthorized")),
ErrorKind::Authorization,
))
} }
}) })
.await .await
@@ -486,12 +461,8 @@ pub async fn remove_mirror(
}: RemoveMirrorParams, }: RemoveMirrorParams,
) -> Result<(), Error> { ) -> Result<(), Error> {
let peek = ctx.db.peek().await; let peek = ctx.db.peek().await;
let signer = signer.ok_or_else(|| { let signer =
Error::new( signer.ok_or_else(|| Error::new(eyre!("{}", t!("registry.package.missing-signer")), ErrorKind::InvalidRequest))?;
eyre!("{}", t!("registry.package.missing-signer")),
ErrorKind::InvalidRequest,
)
})?;
let signer_guid = peek.as_index().as_signers().get_signer(&signer)?; let signer_guid = peek.as_index().as_signers().get_signer(&signer)?;
ctx.db ctx.db
@@ -530,10 +501,7 @@ pub async fn remove_mirror(
} }
Ok(()) Ok(())
} else { } else {
Err(Error::new( Err(Error::new(eyre!("{}", t!("registry.package.remove-mirror.unauthorized")), ErrorKind::Authorization))
eyre!("{}", t!("registry.package.remove-mirror.unauthorized")),
ErrorKind::Authorization,
))
} }
}) })
.await .await

View File

@@ -52,14 +52,10 @@ pub fn package_api<C: Context>() -> ParentHandler<C> {
if !changed { if !changed {
tracing::warn!( tracing::warn!(
"{}", "{}",
t!( t!("registry.package.remove-not-exist",
"registry.package.remove-not-exist",
id = args.params.id, id = args.params.id,
version = args.params.version, version = args.params.version,
sighash = args sighash = args.params.sighash.map_or(String::new(), |h| format!("#{h}"))
.params
.sighash
.map_or(String::new(), |h| format!("#{h}"))
) )
); );
} }
@@ -100,6 +96,7 @@ pub fn package_api<C: Context>() -> ParentHandler<C> {
) )
.subcommand( .subcommand(
"category", "category",
category::category_api::<C>().with_about("about.update-categories-registry"), category::category_api::<C>()
.with_about("about.update-categories-registry"),
) )
} }

View File

@@ -118,14 +118,7 @@ pub async fn remove_package_signer(
.is_some() .is_some()
{ {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("registry.package.signer.not-authorized", signer = signer, id = id)),
"{}",
t!(
"registry.package.signer.not-authorized",
signer = signer,
id = id
)
),
ErrorKind::NotFound, ErrorKind::NotFound,
)); ));
} }

View File

@@ -1,13 +1,10 @@
use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use clap::Parser; use clap::Parser;
use rpc_toolkit::{Empty, HandlerExt, ParentHandler, from_fn_async}; use rpc_toolkit::{Empty, HandlerExt, ParentHandler, from_fn_async};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::process::Command;
use ts_rs::TS; use ts_rs::TS;
use url::Url;
use crate::ImageId; use crate::ImageId;
use crate::context::CliContext; use crate::context::CliContext;
@@ -16,9 +13,9 @@ use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::v2::SIG_CONTEXT; use crate::s9pk::v2::SIG_CONTEXT;
use crate::s9pk::v2::pack::ImageConfig; use crate::s9pk::v2::pack::ImageConfig;
use crate::util::Apply;
use crate::util::io::{TmpDir, create_file, open_file}; use crate::util::io::{TmpDir, create_file, open_file};
use crate::util::serde::{HandlerExtSerde, apply_expr}; use crate::util::serde::{HandlerExtSerde, apply_expr};
use crate::util::{Apply, Invoke};
pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"]; pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"];
@@ -64,12 +61,6 @@ pub fn s9pk() -> ParentHandler<CliContext> {
.no_display() .no_display()
.with_about("about.convert-s9pk-v1-to-v2"), .with_about("about.convert-s9pk-v1-to-v2"),
) )
.subcommand(
"publish",
from_fn_async(publish)
.no_display()
.with_about("about.publish-s9pk"),
)
} }
#[derive(Deserialize, Serialize, Parser)] #[derive(Deserialize, Serialize, Parser)]
@@ -265,61 +256,3 @@ async fn convert(ctx: CliContext, S9pkPath { s9pk: s9pk_path }: S9pkPath) -> Res
tokio::fs::rename(tmp_path, s9pk_path).await?; tokio::fs::rename(tmp_path, s9pk_path).await?;
Ok(()) Ok(())
} }
async fn publish(ctx: CliContext, S9pkPath { s9pk: s9pk_path }: S9pkPath) -> Result<(), Error> {
let filename = s9pk_path.file_name().unwrap().to_string_lossy();
let s9pk = super::S9pk::open(&s9pk_path, None).await?;
let manifest = s9pk.as_manifest();
let path = [
manifest.id.deref(),
manifest.version.as_str(),
filename.deref(),
];
let mut s3url = ctx
.s9pk_s3base
.as_ref()
.ok_or_else(|| Error::new(eyre!("--s9pk-s3base required"), ErrorKind::InvalidRequest))?
.clone();
s3url
.path_segments_mut()
.map_err(|_| {
Error::new(
eyre!("s9pk-s3base is invalid (missing protocol?)"),
ErrorKind::ParseUrl,
)
})?
.pop_if_empty()
.extend(path);
let mut s3dest = format!(
"s3://{}",
ctx.s9pk_s3bucket
.as_deref()
.or_else(|| s3url
.host_str()
.and_then(|h| h.split_once(".").map(|h| h.0)))
.ok_or_else(|| {
Error::new(eyre!("--s9pk-s3bucket required"), ErrorKind::InvalidRequest)
})?,
)
.parse::<Url>()?;
s3dest
.path_segments_mut()
.map_err(|_| {
Error::new(
eyre!("s9pk-s3base is invalid (missing protocol?)"),
ErrorKind::ParseUrl,
)
})?
.pop_if_empty()
.extend(path);
Command::new("s3cmd")
.arg("put")
.arg("-P")
.arg(s9pk_path)
.arg(s3dest.as_str())
.capture(false)
.invoke(ErrorKind::Network)
.await?;
crate::registry::package::add::cli_add_package_impl(ctx, s9pk, vec![s3url], false).await
}

View File

@@ -7,7 +7,6 @@ use clap::Parser;
use futures::future::{BoxFuture, ready}; use futures::future::{BoxFuture, ready};
use futures::{FutureExt, TryStreamExt}; use futures::{FutureExt, TryStreamExt};
use imbl_value::InternedString; use imbl_value::InternedString;
use itertools::Itertools;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::process::Command; use tokio::process::Command;
use tokio::sync::OnceCell; use tokio::sync::OnceCell;
@@ -386,17 +385,13 @@ impl ImageSource {
pub fn ingredients(&self) -> Vec<PathBuf> { pub fn ingredients(&self) -> Vec<PathBuf> {
match self { match self {
Self::Packed => Vec::new(), Self::Packed => Vec::new(),
Self::DockerBuild { Self::DockerBuild { dockerfile, .. } => {
dockerfile, vec![
workdir, dockerfile
..
} => {
vec![dockerfile.clone().unwrap_or_else(|| {
workdir
.as_deref() .as_deref()
.unwrap_or(Path::new(".")) .unwrap_or(Path::new("Dockerfile"))
.join("Dockerfile") .to_owned(),
})] ]
} }
Self::DockerTag(_) => Vec::new(), Self::DockerTag(_) => Vec::new(),
} }
@@ -687,7 +682,7 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
let manifest = s9pk.as_manifest_mut(); let manifest = s9pk.as_manifest_mut();
manifest.git_hash = Some(GitHash::from_path(params.path()).await?); manifest.git_hash = Some(GitHash::from_path(params.path()).await?);
if !params.arch.is_empty() { if !params.arch.is_empty() {
let arches: BTreeSet<InternedString> = match manifest.hardware_requirements.arch.take() { let arches = match manifest.hardware_requirements.arch.take() {
Some(a) => params Some(a) => params
.arch .arch
.iter() .iter()
@@ -696,41 +691,10 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
.collect(), .collect(),
None => params.arch.iter().cloned().collect(), None => params.arch.iter().cloned().collect(),
}; };
if arches.is_empty() { manifest
return Err(Error::new( .images
eyre!( .values_mut()
"none of the requested architectures ({:?}) are supported by this package", .for_each(|c| c.arch = c.arch.intersection(&arches).cloned().collect());
params.arch
),
ErrorKind::InvalidRequest,
));
}
manifest.images.iter_mut().for_each(|(id, c)| {
let filtered = c
.arch
.intersection(&arches)
.cloned()
.collect::<BTreeSet<_>>();
if filtered.is_empty() {
if let Some(arch) = &c.emulate_missing_as {
tracing::warn!(
"ImageId {} is not available for {}, emulating as {}",
id,
arches.iter().join("/"),
arch
);
c.arch = [arch.clone()].into_iter().collect();
} else {
tracing::error!(
"ImageId {} is not available for {}",
id,
arches.iter().join("/"),
);
}
} else {
c.arch = filtered;
}
});
manifest.hardware_requirements.arch = Some(arches); manifest.hardware_requirements.arch = Some(arches);
} }

View File

@@ -102,13 +102,7 @@ pub fn update_tasks(
} }
} }
None => { None => {
tracing::error!( tracing::error!("{}", t!("service.action.action-request-invalid-state", task = format!("{:?}", v.task)));
"{}",
t!(
"service.action.action-request-invalid-state",
task = format!("{:?}", v.task)
)
);
} }
}, },
} }
@@ -157,10 +151,7 @@ impl Handler<RunAction> for ServiceActor {
.de()?; .de()?;
if matches!(&action.visibility, ActionVisibility::Disabled(_)) { if matches!(&action.visibility, ActionVisibility::Disabled(_)) {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("service.action.action-is-disabled", action_id = action_id)),
"{}",
t!("service.action.action-is-disabled", action_id = action_id)
),
ErrorKind::Action, ErrorKind::Action,
)); ));
} }
@@ -171,13 +162,7 @@ impl Handler<RunAction> for ServiceActor {
_ => false, _ => false,
} { } {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("service.action.service-not-in-allowed-status", action_id = action_id)),
"{}",
t!(
"service.action.service-not-in-allowed-status",
action_id = action_id
)
),
ErrorKind::Action, ErrorKind::Action,
)); ));
} }

View File

@@ -181,10 +181,7 @@ async fn run_action(
if package_id != &context.seed.id { if package_id != &context.seed.id {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("service.effects.action.calling-actions-on-other-packages-unsupported")),
"{}",
t!("service.effects.action.calling-actions-on-other-packages-unsupported")
),
ErrorKind::InvalidRequest, ErrorKind::InvalidRequest,
)); ));
context context
@@ -229,10 +226,7 @@ async fn create_task(
TaskCondition::InputNotMatches => { TaskCondition::InputNotMatches => {
let Some(input) = task.input.as_ref() else { let Some(input) = task.input.as_ref() else {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("service.effects.action.input-not-matches-requires-input")),
"{}",
t!("service.effects.action.input-not-matches-requires-input")
),
ErrorKind::InvalidRequest, ErrorKind::InvalidRequest,
)); ));
}; };
@@ -250,12 +244,7 @@ async fn create_task(
else { else {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!(
"{}", "{}", t!("service.effects.action.action-has-no-input", action_id = task.action_id, package_id = task.package_id)
t!(
"service.effects.action.action-has-no-input",
action_id = task.action_id,
package_id = task.package_id
)
), ),
ErrorKind::InvalidRequest, ErrorKind::InvalidRequest,
)); ));

View File

@@ -79,7 +79,7 @@ pub async fn mount(
} }
IdMapped::new( IdMapped::new(
Bind::new(source).with_type(filetype).recursive(true), Bind::new(source).with_type(filetype),
IdMap::stack( IdMap::stack(
vec![IdMap { vec![IdMap {
from_id: 0, from_id: 0,

View File

@@ -10,7 +10,6 @@ use crate::rpc_continuations::Guid;
use crate::service::effects::prelude::*; use crate::service::effects::prelude::*;
use crate::service::persistent_container::Subcontainer; use crate::service::persistent_container::Subcontainer;
use crate::util::Invoke; use crate::util::Invoke;
use crate::util::io::write_file_owned_atomic;
pub const NVIDIA_OVERLAY_PATH: &str = "/var/tmp/startos/nvidia-overlay"; pub const NVIDIA_OVERLAY_PATH: &str = "/var/tmp/startos/nvidia-overlay";
pub const NVIDIA_OVERLAY_DEBIAN: &str = "/var/tmp/startos/nvidia-overlay/debian"; pub const NVIDIA_OVERLAY_DEBIAN: &str = "/var/tmp/startos/nvidia-overlay/debian";
@@ -95,7 +94,7 @@ pub async fn create_subcontainer_fs(
.cloned() .cloned()
{ {
let guid = Guid::new(); let guid = Guid::new();
let lxc_container = context let rootfs_dir = context
.seed .seed
.persistent_container .persistent_container
.lxc_container .lxc_container
@@ -105,9 +104,8 @@ pub async fn create_subcontainer_fs(
eyre!("PersistentContainer has been destroyed"), eyre!("PersistentContainer has been destroyed"),
ErrorKind::Incoherent, ErrorKind::Incoherent,
) )
})?; })?
let container_guid = &lxc_container.guid; .rootfs_dir();
let rootfs_dir = lxc_container.rootfs_dir();
let mountpoint = rootfs_dir let mountpoint = rootfs_dir
.join("media/startos/subcontainers") .join("media/startos/subcontainers")
.join(guid.as_ref()); .join(guid.as_ref());
@@ -156,20 +154,6 @@ pub async fn create_subcontainer_fs(
.arg(&mountpoint) .arg(&mountpoint)
.invoke(ErrorKind::Filesystem) .invoke(ErrorKind::Filesystem)
.await?; .await?;
write_file_owned_atomic(
mountpoint.join("etc/hostname"),
format!("{container_guid}\n"),
100000,
100000,
)
.await?;
write_file_owned_atomic(
mountpoint.join("etc/hosts"),
format!("127.0.0.1\tlocalhost\n127.0.1.1\t{container_guid}\n::1\tlocalhost ip6-localhost ip6-loopback\n"),
100000,
100000,
)
.await?;
tracing::info!("Mounted overlay {guid} for {image_id}"); tracing::info!("Mounted overlay {guid} for {image_id}");
context context
.seed .seed

View File

@@ -1,6 +1,7 @@
use std::collections::BTreeMap;
use std::ffi::{OsStr, OsString, c_int}; use std::ffi::{OsStr, OsString, c_int};
use std::fs::File; use std::fs::File;
use std::io::{BufRead, BufReader, IsTerminal, Read}; use std::io::{IsTerminal, Read};
use std::os::unix::process::{CommandExt, ExitStatusExt}; use std::os::unix::process::{CommandExt, ExitStatusExt};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::{Command as StdCommand, Stdio}; use std::process::{Command as StdCommand, Stdio};
@@ -145,160 +146,95 @@ impl ExecParams {
let mut cmd = StdCommand::new(command); let mut cmd = StdCommand::new(command);
let mut uid = Err(None); let passwd = std::fs::read_to_string(chroot.join("etc/passwd"))
let mut gid = Err(None); .with_ctx(|_| (ErrorKind::Filesystem, "read /etc/passwd"))
let mut needs_home = true; .log_err()
.unwrap_or_default();
let mut home = None;
if let Some(user) = user { if let Some((uid, gid)) =
if let Some((u, g)) = user.split_once(":") { if let Some(uid) = user.as_deref().and_then(|u| u.parse::<u32>().ok()) {
uid = Err(Some(u)); Some((uid, uid))
gid = Err(Some(g)); } else if let Some((uid, gid)) = user
.as_deref()
.and_then(|u| u.split_once(":"))
.and_then(|(u, g)| Some((u.parse::<u32>().ok()?, g.parse::<u32>().ok()?)))
{
Some((uid, gid))
} else if let Some(user) = user {
Some(
if let Some((uid, gid)) = passwd.lines().find_map(|l| {
let l = l.trim();
let mut split = l.split(":");
if user != split.next()? {
return None;
}
split.next(); // throw away x
let uid = split.next()?.parse().ok()?;
let gid = split.next()?.parse().ok()?;
split.next(); // throw away group name
home = split.next();
Some((uid, gid))
// uid gid
}) {
(uid, gid)
} else if user == "root" {
(0, 0)
} else {
None.or_not_found(lazy_format!("{user} in /etc/passwd"))?
},
)
} else { } else {
uid = Err(Some(user)); None
} }
} {
if home.is_none() {
home = passwd.lines().find_map(|l| {
let l = l.trim();
let mut split = l.split(":");
if let Some(u) = uid.err().flatten().and_then(|u| u.parse::<u32>().ok()) { split.next(); // throw away user name
uid = Ok(u); split.next(); // throw away x
} if split.next()?.parse::<u32>().ok()? != uid {
if let Some(g) = gid.err().flatten().and_then(|g| g.parse::<u32>().ok()) { return None;
gid = Ok(g); }
} split.next(); // throw away gid
split.next(); // throw away group name
let mut update_env = |line: &str| { split.next()
if let Some((k, v)) = line.split_once("=") { })
needs_home &= k != "HOME"; };
cmd.env(k, v); std::os::unix::fs::chown("/proc/self/fd/0", Some(uid), Some(gid)).ok();
} else { std::os::unix::fs::chown("/proc/self/fd/1", Some(uid), Some(gid)).ok();
tracing::warn!("Invalid line in env: {line}"); std::os::unix::fs::chown("/proc/self/fd/2", Some(uid), Some(gid)).ok();
} cmd.uid(uid);
cmd.gid(gid);
} else {
home = Some("/root");
}
cmd.env("HOME", home.unwrap_or("/"));
let env_string = if let Some(env_file) = &env_file {
std::fs::read_to_string(env_file)
.with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("read {env:?}")))?
} else {
Default::default()
}; };
if let Some(f) = env_file { let env = env_string
let mut lines = BufReader::new( .lines()
File::open(&f).with_ctx(|_| (ErrorKind::Filesystem, format!("open r {f:?}")))?, .chain(env.iter().map(|l| l.as_str()))
) .map(|l| l.trim())
.lines(); .filter_map(|l| l.split_once("="))
while let Some(line) = lines.next().transpose()? { .collect::<BTreeMap<_, _>>();
update_env(&line);
}
}
for line in env {
update_env(&line);
}
let needs_gid = Err(None) == gid;
let mut username = InternedString::intern("root");
let mut handle_passwd_line = |line: &str| -> Option<()> {
let l = line.trim();
let mut split = l.split(":");
let user = split.next()?;
match uid {
Err(Some(u)) if u != user => return None,
_ => (),
}
split.next(); // throw away x
let u: u32 = split.next()?.parse().ok()?;
match uid {
Err(Some(_)) => uid = Ok(u),
Err(None) if u == 0 => uid = Ok(u),
Ok(uid) if uid != u => return None,
_ => (),
}
username = user.into();
if !needs_gid && !needs_home {
return Some(());
}
let g = split.next()?;
if needs_gid {
gid = Ok(g.parse().ok()?);
}
if needs_home {
split.next(); // throw away group name
let home = split.next()?;
cmd.env("HOME", home);
}
Some(())
};
let mut lines = BufReader::new(
File::open(chroot.join("etc/passwd"))
.with_ctx(|_| (ErrorKind::Filesystem, format!("open r /etc/passwd")))?,
)
.lines();
while let Some(line) = lines.next().transpose()? {
if handle_passwd_line(&line).is_some() {
break;
}
}
let mut groups = Vec::new();
let mut handle_group_line = |line: &str| -> Option<()> {
let l = line.trim();
let mut split = l.split(":");
let name = split.next()?;
split.next()?; // throw away x
let g = split.next()?.parse::<u32>().ok()?;
match gid {
Err(Some(n)) if n == name => gid = Ok(g),
_ => (),
}
let users = split.next()?;
if users.split(",").any(|u| u == &*username) {
groups.push(nix::unistd::Gid::from_raw(g));
}
Some(())
};
let mut lines = BufReader::new(
File::open(chroot.join("etc/group"))
.with_ctx(|_| (ErrorKind::Filesystem, format!("open r /etc/group")))?,
)
.lines();
while let Some(line) = lines.next().transpose()? {
if handle_group_line(&line).is_none() {
tracing::warn!("Invalid /etc/group line: {line}");
}
}
std::os::unix::fs::chroot(chroot) std::os::unix::fs::chroot(chroot)
.with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("chroot {chroot:?}")))?; .with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("chroot {chroot:?}")))?;
if let Ok(uid) = uid {
if uid != 0 {
std::os::unix::fs::chown("/proc/self/fd/0", Some(uid), gid.ok()).ok();
std::os::unix::fs::chown("/proc/self/fd/1", Some(uid), gid.ok()).ok();
std::os::unix::fs::chown("/proc/self/fd/2", Some(uid), gid.ok()).ok();
}
}
// Handle credential changes in pre_exec to control the order:
// setgroups must happen before setgid/setuid (requires CAP_SETGID)
{
let set_uid = uid.ok();
let set_gid = gid.ok();
unsafe {
cmd.pre_exec(move || {
if !groups.is_empty() {
nix::unistd::setgroups(&groups)
.map_err(|e| std::io::Error::from_raw_os_error(e as i32))?;
}
if let Some(gid) = set_gid {
nix::unistd::setgid(nix::unistd::Gid::from_raw(gid))
.map_err(|e| std::io::Error::from_raw_os_error(e as i32))?;
}
if let Some(uid) = set_uid {
nix::unistd::setuid(nix::unistd::Uid::from_raw(uid))
.map_err(|e| std::io::Error::from_raw_os_error(e as i32))?;
}
Ok(())
});
}
}
cmd.args(args); cmd.args(args);
for (k, v) in env {
cmd.env(k, v);
}
if let Some(workdir) = workdir { if let Some(workdir) = workdir {
cmd.current_dir(workdir); cmd.current_dir(workdir);

View File

@@ -28,6 +28,7 @@ use tokio_tungstenite::tungstenite::protocol::frame::coding::CloseCode;
use ts_rs::TS; use ts_rs::TS;
use url::Url; use url::Url;
use crate::context::{CliContext, RpcContext}; use crate::context::{CliContext, RpcContext};
use crate::db::model::package::{ use crate::db::model::package::{
InstalledState, ManifestPreference, PackageState, PackageStateMatchModelRef, TaskSeverity, InstalledState, ManifestPreference, PackageState, PackageStateMatchModelRef, TaskSeverity,
@@ -50,7 +51,6 @@ use crate::util::io::{AsyncReadStream, AtomicFile, TermSize, delete_file};
use crate::util::net::WebSocket; use crate::util::net::WebSocket;
use crate::util::serde::Pem; use crate::util::serde::Pem;
use crate::util::sync::SyncMutex; use crate::util::sync::SyncMutex;
use crate::util::tui::choose;
use crate::volume::data_dir; use crate::volume::data_dir;
use crate::{ActionId, CAP_1_KiB, DATA_DIR, HostId, ImageId, PackageId}; use crate::{ActionId, CAP_1_KiB, DATA_DIR, HostId, ImageId, PackageId};
@@ -184,10 +184,7 @@ impl ServiceRef {
Arc::try_unwrap(service.seed) Arc::try_unwrap(service.seed)
.map_err(|_| { .map_err(|_| {
Error::new( Error::new(
eyre!( eyre!("{}", t!("service.mod.service-actor-seed-held-after-shutdown")),
"{}",
t!("service.mod.service-actor-seed-held-after-shutdown")
),
ErrorKind::Unknown, ErrorKind::Unknown,
) )
})? })?
@@ -379,16 +376,12 @@ impl Service {
{ {
Ok(PackageState::Installed(InstalledState { manifest })) Ok(PackageState::Installed(InstalledState { manifest }))
} else { } else {
Err(Error::new( Err(Error::new(eyre!("{}", t!("service.mod.race-condition-detected")), ErrorKind::Database))
eyre!("{}", t!("service.mod.race-condition-detected")),
ErrorKind::Database,
))
} }
}) })
} }
}) })
.await .await.result?;
.result?;
handle_installed(s9pk).await handle_installed(s9pk).await
} }
PackageStateMatchModelRef::Removing(_) | PackageStateMatchModelRef::Restoring(_) => { PackageStateMatchModelRef::Removing(_) | PackageStateMatchModelRef::Restoring(_) => {
@@ -454,13 +447,7 @@ impl Service {
handle_installed(S9pk::open(s9pk_path, Some(id)).await?).await handle_installed(S9pk::open(s9pk_path, Some(id)).await?).await
} }
PackageStateMatchModelRef::Error(e) => Err(Error::new( PackageStateMatchModelRef::Error(e) => Err(Error::new(
eyre!( eyre!("{}", t!("service.mod.failed-to-parse-package-data-entry", error = format!("{e:?}"))),
"{}",
t!(
"service.mod.failed-to-parse-package-data-entry",
error = format!("{e:?}")
)
),
ErrorKind::Deserialization, ErrorKind::Deserialization,
)), )),
} }
@@ -566,11 +553,7 @@ impl Service {
true true
} else { } else {
tracing::warn!( tracing::warn!(
"{}", "{}", t!("service.mod.deleting-task-action-no-longer-exists", id = id)
t!(
"service.mod.deleting-task-action-no-longer-exists",
id = id
)
); );
false false
} }
@@ -710,19 +693,6 @@ pub async fn rebuild(ctx: RpcContext, RebuildParams { id }: RebuildParams) -> Re
Ok(()) Ok(())
} }
#[derive(Debug, Deserialize, Serialize)]
pub struct SubcontainerInfo {
pub id: Guid,
pub name: InternedString,
pub image_id: ImageId,
}
impl std::fmt::Display for SubcontainerInfo {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let SubcontainerInfo { id, name, image_id } = self;
write!(f, "{id} => Name: {name}; Image: {image_id}")
}
}
#[derive(Deserialize, Serialize, TS)] #[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct AttachParams { pub struct AttachParams {
@@ -736,7 +706,7 @@ pub struct AttachParams {
#[serde(rename = "__Auth_session")] #[serde(rename = "__Auth_session")]
session: Option<InternedString>, session: Option<InternedString>,
#[ts(type = "string | null")] #[ts(type = "string | null")]
subcontainer: Option<Guid>, subcontainer: Option<InternedString>,
#[ts(type = "string | null")] #[ts(type = "string | null")]
name: Option<InternedString>, name: Option<InternedString>,
#[ts(type = "string | null")] #[ts(type = "string | null")]
@@ -759,7 +729,7 @@ pub async fn attach(
user, user,
}: AttachParams, }: AttachParams,
) -> Result<Guid, Error> { ) -> Result<Guid, Error> {
let (container_id, subcontainer_id, image_id, user, workdir, root_command) = { let (container_id, subcontainer_id, image_id, workdir, root_command) = {
let id = &id; let id = &id;
let service = ctx.services.get(id).await; let service = ctx.services.get(id).await;
@@ -800,6 +770,13 @@ pub async fn attach(
} }
}) })
.collect(); .collect();
let format_subcontainer_pair = |(guid, wrapper): (&Guid, &Subcontainer)| {
format!(
"{guid} imageId: {image_id} name: \"{name}\"",
name = &wrapper.name,
image_id = &wrapper.image_id
)
};
let Some((subcontainer_id, image_id)) = subcontainer_ids let Some((subcontainer_id, image_id)) = subcontainer_ids
.first() .first()
.map::<(Guid, ImageId), _>(|&x| (x.0.clone(), x.1.image_id.clone())) .map::<(Guid, ImageId), _>(|&x| (x.0.clone(), x.1.image_id.clone()))
@@ -810,17 +787,14 @@ pub async fn attach(
.lock() .lock()
.await .await
.iter() .iter()
.map(|(g, s)| SubcontainerInfo { .map(format_subcontainer_pair)
id: g.clone(), .join("\n");
name: s.name.clone(),
image_id: s.image_id.clone(),
})
.collect::<Vec<_>>();
return Err(Error::new( return Err(Error::new(
eyre!("{}", t!("service.mod.no-matching-subcontainers", id = id)), eyre!(
"{}", t!("service.mod.no-matching-subcontainers", id = id, subcontainers = subcontainers)
),
ErrorKind::NotFound, ErrorKind::NotFound,
) ));
.with_info(to_value(&subcontainers)?));
}; };
let passwd = root_dir let passwd = root_dir
@@ -840,39 +814,31 @@ pub async fn attach(
) )
.with_kind(ErrorKind::Deserialization)?; .with_kind(ErrorKind::Deserialization)?;
let user = user let root_command = get_passwd_command(
.clone() passwd,
.or_else(|| image_meta["user"].as_str().map(InternedString::intern)) user.as_deref()
.unwrap_or_else(|| InternedString::intern("root")); .or_else(|| image_meta["user"].as_str())
.unwrap_or("root"),
let root_command = get_passwd_command(passwd, &*user).await; )
.await;
let workdir = image_meta["workdir"].as_str().map(|s| s.to_owned()); let workdir = image_meta["workdir"].as_str().map(|s| s.to_owned());
if subcontainer_ids.len() > 1 { if subcontainer_ids.len() > 1 {
let subcontainers = subcontainer_ids let subcontainer_ids = subcontainer_ids
.into_iter() .into_iter()
.map(|(g, s)| SubcontainerInfo { .map(format_subcontainer_pair)
id: g.clone(), .join("\n");
name: s.name.clone(),
image_id: s.image_id.clone(),
})
.collect::<Vec<_>>();
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("service.mod.multiple-subcontainers-found", id = id, subcontainer_ids = subcontainer_ids)),
"{}",
t!("service.mod.multiple-subcontainers-found", id = id,)
),
ErrorKind::InvalidRequest, ErrorKind::InvalidRequest,
) ));
.with_info(to_value(&subcontainers)?));
} }
( (
service_ref.container_id()?, service_ref.container_id()?,
subcontainer_id, subcontainer_id,
image_id, image_id,
user.into(),
workdir, workdir,
root_command, root_command,
) )
@@ -889,7 +855,7 @@ pub async fn attach(
pty_size: Option<TermSize>, pty_size: Option<TermSize>,
image_id: ImageId, image_id: ImageId,
workdir: Option<String>, workdir: Option<String>,
user: InternedString, user: Option<InternedString>,
root_command: &RootCommand, root_command: &RootCommand,
) -> Result<(), Error> { ) -> Result<(), Error> {
use axum::extract::ws::Message; use axum::extract::ws::Message;
@@ -910,9 +876,11 @@ pub async fn attach(
Path::new("/media/startos/images") Path::new("/media/startos/images")
.join(image_id) .join(image_id)
.with_extension("env"), .with_extension("env"),
) );
.arg("--user")
.arg(&*user); if let Some(user) = user {
cmd.arg("--user").arg(&*user);
}
if let Some(workdir) = workdir { if let Some(workdir) = workdir {
cmd.arg("--workdir").arg(workdir); cmd.arg("--workdir").arg(workdir);
@@ -1095,6 +1063,45 @@ pub async fn attach(
Ok(guid) Ok(guid)
} }
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListSubcontainersParams {
pub id: PackageId,
}
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct SubcontainerInfo {
pub name: InternedString,
pub image_id: ImageId,
}
pub async fn list_subcontainers(
ctx: RpcContext,
ListSubcontainersParams { id }: ListSubcontainersParams,
) -> Result<BTreeMap<Guid, SubcontainerInfo>, Error> {
let service = ctx.services.get(&id).await;
let service_ref = service.as_ref().or_not_found(&id)?;
let container = &service_ref.seed.persistent_container;
let subcontainers = container.subcontainers.lock().await;
let result: BTreeMap<Guid, SubcontainerInfo> = subcontainers
.iter()
.map(|(guid, subcontainer)| {
(
guid.clone(),
SubcontainerInfo {
name: subcontainer.name.clone(),
image_id: subcontainer.image_id.clone(),
},
)
})
.collect();
Ok(result)
}
async fn get_passwd_command(etc_passwd_path: PathBuf, user: &str) -> RootCommand { async fn get_passwd_command(etc_passwd_path: PathBuf, user: &str) -> RootCommand {
async { async {
let mut file = tokio::fs::File::open(etc_passwd_path).await?; let mut file = tokio::fs::File::open(etc_passwd_path).await?;
@@ -1113,13 +1120,7 @@ async fn get_passwd_command(etc_passwd_path: PathBuf, user: &str) -> RootCommand
} }
} }
Err(Error::new( Err(Error::new(
eyre!( eyre!("{}", t!("service.mod.could-not-parse-etc-passwd", contents = contents)),
"{}",
t!(
"service.mod.could-not-parse-etc-passwd",
contents = contents
)
),
ErrorKind::Filesystem, ErrorKind::Filesystem,
)) ))
} }
@@ -1175,34 +1176,23 @@ pub async fn cli_attach(
None None
}; };
let method = parent_method.into_iter().chain(method).join(".");
let mut params = json!({
"id": params.id,
"command": params.command,
"tty": tty,
"stderrTty": stderr.is_terminal(),
"ptySize": if tty { TermSize::get_current() } else { None },
"subcontainer": params.subcontainer,
"imageId": params.image_id,
"name": params.name,
"user": params.user,
});
let guid: Guid = from_value( let guid: Guid = from_value(
match context context
.call_remote::<RpcContext>(&method, params.clone()) .call_remote::<RpcContext>(
.await &parent_method.into_iter().chain(method).join("."),
{ json!({
Ok(a) => a, "id": params.id,
Err(e) => { "command": params.command,
let prompt = e.to_string(); "tty": tty,
let options: Vec<SubcontainerInfo> = from_value(e.info)?; "stderrTty": stderr.is_terminal(),
let choice = choose(&prompt, &options).await?; "ptySize": if tty { TermSize::get_current() } else { None },
params["subcontainer"] = to_value(&choice.id)?; "subcontainer": params.subcontainer,
context "imageId": params.image_id,
.call_remote::<RpcContext>(&method, params.clone()) "name": params.name,
.await? "user": params.user,
} }),
}, )
.await?,
)?; )?;
let mut ws = context.ws_continuation(guid).await?; let mut ws = context.ws_continuation(guid).await?;

View File

@@ -364,14 +364,7 @@ impl PersistentContainer {
let handle = NonDetachingJoinHandle::from(tokio::spawn(async move { let handle = NonDetachingJoinHandle::from(tokio::spawn(async move {
let chown_status = async { let chown_status = async {
let res = server.run_unix(&path, |err| { let res = server.run_unix(&path, |err| {
tracing::error!( tracing::error!("{}", t!("service.persistent-container.error-on-unix-socket", path = path.display(), error = err))
"{}",
t!(
"service.persistent-container.error-on-unix-socket",
path = path.display(),
error = err
)
)
})?; })?;
Command::new("chown") Command::new("chown")
.arg("100000:100000") .arg("100000:100000")
@@ -393,10 +386,7 @@ impl PersistentContainer {
})); }));
let shutdown = recv.await.map_err(|_| { let shutdown = recv.await.map_err(|_| {
Error::new( Error::new(
eyre!( eyre!("{}", t!("service.persistent-container.unix-socket-server-panicked")),
"{}",
t!("service.persistent-container.unix-socket-server-panicked")
),
ErrorKind::Unknown, ErrorKind::Unknown,
) )
})??; })??;
@@ -483,13 +473,7 @@ impl PersistentContainer {
if let Some(destroy) = self.destroy(uninit) { if let Some(destroy) = self.destroy(uninit) {
destroy.await?; destroy.await?;
} }
tracing::info!( tracing::info!("{}", t!("service.persistent-container.service-exited", id = self.s9pk.as_manifest().id));
"{}",
t!(
"service.persistent-container.service-exited",
id = self.s9pk.as_manifest().id
)
);
Ok(()) Ok(())
} }

View File

@@ -47,18 +47,9 @@ impl Actor for ServiceActor {
} }
.await .await
{ {
tracing::error!( tracing::error!("{}", t!("service.service-actor.error-synchronizing-state", error = e));
"{}",
t!("service.service-actor.error-synchronizing-state", error = e)
);
tracing::debug!("{e:?}"); tracing::debug!("{e:?}");
tracing::error!( tracing::error!("{}", t!("service.service-actor.retrying-in-seconds", seconds = SYNC_RETRY_COOLDOWN_SECONDS));
"{}",
t!(
"service.service-actor.retrying-in-seconds",
seconds = SYNC_RETRY_COOLDOWN_SECONDS
)
);
tokio::time::timeout( tokio::time::timeout(
Duration::from_secs(SYNC_RETRY_COOLDOWN_SECONDS), Duration::from_secs(SYNC_RETRY_COOLDOWN_SECONDS),
async { async {

View File

@@ -62,13 +62,7 @@ pub async fn cleanup(ctx: &RpcContext, id: &PackageId, soft: bool) -> Result<(),
| PackageState::Removing(InstalledState { manifest }) => manifest, | PackageState::Removing(InstalledState { manifest }) => manifest,
s => { s => {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("service.uninstall.invalid-package-state-for-cleanup", state = format!("{s:?}"))),
"{}",
t!(
"service.uninstall.invalid-package-state-for-cleanup",
state = format!("{s:?}")
)
),
ErrorKind::InvalidRequest, ErrorKind::InvalidRequest,
)); ));
} }

View File

@@ -1,3 +1,4 @@
use crate::PLATFORM; use crate::PLATFORM;
use crate::context::RpcContext; use crate::context::RpcContext;
use crate::disk::main::export; use crate::disk::main::export;
@@ -35,33 +36,18 @@ impl Shutdown {
.invoke(crate::ErrorKind::Journald) .invoke(crate::ErrorKind::Journald)
.await .await
{ {
tracing::error!( tracing::error!("{}", t!("shutdown.error-stopping-journald", error = e.to_string()));
"{}",
t!("shutdown.error-stopping-journald", error = e.to_string())
);
tracing::debug!("{:?}", e); tracing::debug!("{:?}", e);
} }
if let Some(guid) = &self.disk_guid { if let Some(guid) = &self.disk_guid {
if let Err(e) = export(guid, crate::DATA_DIR).await { if let Err(e) = export(guid, crate::DATA_DIR).await {
tracing::error!( tracing::error!("{}", t!("shutdown.error-exporting-volume-group", error = e.to_string()));
"{}",
t!(
"shutdown.error-exporting-volume-group",
error = e.to_string()
)
);
tracing::debug!("{:?}", e); tracing::debug!("{:?}", e);
} }
} }
if &*PLATFORM != "raspberrypi" || self.restart { if &*PLATFORM != "raspberrypi" || self.restart {
if let Err(e) = SHUTDOWN.play().await { if let Err(e) = SHUTDOWN.play().await {
tracing::error!( tracing::error!("{}", t!("shutdown.error-playing-shutdown-song", error = e.to_string()));
"{}",
t!(
"shutdown.error-playing-shutdown-song",
error = e.to_string()
)
);
tracing::debug!("{:?}", e); tracing::debug!("{:?}", e);
} }
} }

View File

@@ -19,7 +19,8 @@ pub fn tunnel_api<C: Context>() -> ParentHandler<C> {
.subcommand("web", super::web::web_api::<C>()) .subcommand("web", super::web::web_api::<C>())
.subcommand( .subcommand(
"db", "db",
super::db::db_api::<C>().with_about("about.commands-interact-with-db-dump-apply"), super::db::db_api::<C>()
.with_about("about.commands-interact-with-db-dump-apply"),
) )
.subcommand( .subcommand(
"auth", "auth",

View File

@@ -6,7 +6,6 @@ use clap::{ArgAction, Parser};
use color_eyre::eyre::{Result, eyre}; use color_eyre::eyre::{Result, eyre};
use exver::{Version, VersionRange}; use exver::{Version, VersionRange};
use futures::TryStreamExt; use futures::TryStreamExt;
use imbl::OrdMap;
use imbl_value::json; use imbl_value::json;
use itertools::Itertools; use itertools::Itertools;
use patch_db::json_ptr::JsonPointer; use patch_db::json_ptr::JsonPointer;
@@ -180,10 +179,7 @@ pub async fn cli_update_system(
Some(v) => { Some(v) => {
if let Some(progress) = res.progress { if let Some(progress) = res.progress {
let mut ws = context.ws_continuation(progress).await?; let mut ws = context.ws_continuation(progress).await?;
let mut progress = PhasedProgressBar::new(&t!( let mut progress = PhasedProgressBar::new(&t!("update.updating-to-version", version = v.to_string()));
"update.updating-to-version",
version = v.to_string()
));
let mut prev = None; let mut prev = None;
while let Some(msg) = ws.try_next().await.with_kind(ErrorKind::Network)? { while let Some(msg) = ws.try_next().await.with_kind(ErrorKind::Network)? {
if let tokio_tungstenite::tungstenite::Message::Text(msg) = msg { if let tokio_tungstenite::tungstenite::Message::Text(msg) = msg {
@@ -206,10 +202,7 @@ pub async fn cli_update_system(
} }
println!("{}", t!("update.complete-restart-to-apply")) println!("{}", t!("update.complete-restart-to-apply"))
} else { } else {
println!( println!("{}", t!("update.updating-to-version", version = v.to_string()))
"{}",
t!("update.updating-to-version", version = v.to_string())
)
} }
} }
} }
@@ -246,7 +239,6 @@ async fn maybe_do_update(
let mut available = from_value::<BTreeMap<Version, OsVersionInfo>>( let mut available = from_value::<BTreeMap<Version, OsVersionInfo>>(
ctx.call_remote_with::<RegistryContext, _>( ctx.call_remote_with::<RegistryContext, _>(
"os.version.get", "os.version.get",
OrdMap::new(),
json!({ json!({
"source": current_version, "source": current_version,
"target": target, "target": target,

View File

@@ -248,7 +248,7 @@ impl<'a> Invoke<'a> for ExtendedCommand<'a> {
.or(Some(&res.stdout)) .or(Some(&res.stdout))
.filter(|a| !a.is_empty()) .filter(|a| !a.is_empty())
.and_then(|a| std::str::from_utf8(a).ok()) .and_then(|a| std::str::from_utf8(a).ok())
.unwrap_or(&format!("{} exited with {}", cmd_str, res.status)) .unwrap_or(&format!("{} exited with code {}", cmd_str, res.status))
); );
Ok(res.stdout) Ok(res.stdout)
} else { } else {
@@ -309,7 +309,7 @@ impl<'a> Invoke<'a> for ExtendedCommand<'a> {
.filter(|a| !a.is_empty()) .filter(|a| !a.is_empty())
.and_then(|a| std::str::from_utf8(a).ok()) .and_then(|a| std::str::from_utf8(a).ok())
.unwrap_or(&format!( .unwrap_or(&format!(
"{} exited with {}", "{} exited with code {}",
cmd.as_std().get_program().to_string_lossy(), cmd.as_std().get_program().to_string_lossy(),
res.status res.status
)) ))

View File

@@ -97,11 +97,7 @@ impl WebSocket {
if self.ping_state.is_some() { if self.ping_state.is_some() {
self.fused = true; self.fused = true;
break Poll::Ready(Some(Err(axum::Error::new(eyre!( break Poll::Ready(Some(Err(axum::Error::new(eyre!(
"{}", "{}", t!("util.net.websocket-ping-timeout", timeout = format!("{PING_TIMEOUT:?}"))
t!(
"util.net.websocket-ping-timeout",
timeout = format!("{PING_TIMEOUT:?}")
)
))))); )))));
} }
self.ping_state = Some((false, rand::random())); self.ping_state = Some((false, rand::random()));

View File

@@ -1151,13 +1151,7 @@ pub fn apply_expr(input: jaq_core::Val, expr: &str) -> Result<jaq_core::Val, Err
let Some(expr) = expr else { let Some(expr) = expr else {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("util.serde.failed-to-parse-expression", errors = format!("{:?}", errs))),
"{}",
t!(
"util.serde.failed-to-parse-expression",
errors = format!("{:?}", errs)
)
),
crate::ErrorKind::InvalidRequest, crate::ErrorKind::InvalidRequest,
)); ));
}; };
@@ -1173,13 +1167,7 @@ pub fn apply_expr(input: jaq_core::Val, expr: &str) -> Result<jaq_core::Val, Err
if !errs.is_empty() { if !errs.is_empty() {
return Err(Error::new( return Err(Error::new(
eyre!( eyre!("{}", t!("util.serde.failed-to-compile-expression", errors = format!("{:?}", errs))),
"{}",
t!(
"util.serde.failed-to-compile-expression",
errors = format!("{:?}", errs)
)
),
crate::ErrorKind::InvalidRequest, crate::ErrorKind::InvalidRequest,
)); ));
}; };

View File

@@ -50,10 +50,7 @@ pub async fn prompt<T, E: std::fmt::Display, Parse: FnMut(&str) -> Result<T, E>>
} }
} }
ReadlineEvent::Eof | ReadlineEvent::Interrupted => { ReadlineEvent::Eof | ReadlineEvent::Interrupted => {
return Err(Error::new( return Err(Error::new(eyre!("{}", t!("util.tui.aborted")), ErrorKind::Cancelled));
eyre!("{}", t!("util.tui.aborted")),
ErrorKind::Cancelled,
));
} }
_ => (), _ => (),
} }
@@ -86,10 +83,7 @@ pub async fn prompt_multiline<
Err(e) => writeln!(&mut rl_ctx.shared_writer, "{e}")?, Err(e) => writeln!(&mut rl_ctx.shared_writer, "{e}")?,
}, },
ReadlineEvent::Eof | ReadlineEvent::Interrupted => { ReadlineEvent::Eof | ReadlineEvent::Interrupted => {
return Err(Error::new( return Err(Error::new(eyre!("{}", t!("util.tui.aborted")), ErrorKind::Cancelled));
eyre!("{}", t!("util.tui.aborted")),
ErrorKind::Cancelled,
));
} }
_ => (), _ => (),
} }
@@ -125,10 +119,7 @@ pub async fn choose_custom_display<'t, T>(
.await .await
.map_err(map_miette)?; .map_err(map_miette)?;
if choice.len() < 1 { if choice.len() < 1 {
return Err(Error::new( return Err(Error::new(eyre!("{}", t!("util.tui.aborted")), ErrorKind::Cancelled));
eyre!("{}", t!("util.tui.aborted")),
ErrorKind::Cancelled,
));
} }
let (idx, choice_str) = string_choices let (idx, choice_str) = string_choices
.iter() .iter()

View File

@@ -58,9 +58,8 @@ mod v0_4_0_alpha_15;
mod v0_4_0_alpha_16; mod v0_4_0_alpha_16;
mod v0_4_0_alpha_17; mod v0_4_0_alpha_17;
mod v0_4_0_alpha_18; mod v0_4_0_alpha_18;
mod v0_4_0_alpha_19;
pub type Current = v0_4_0_alpha_19::Version; // VERSION_BUMP pub type Current = v0_4_0_alpha_18::Version; // VERSION_BUMP
impl Current { impl Current {
#[instrument(skip(self, db))] #[instrument(skip(self, db))]
@@ -180,8 +179,7 @@ enum Version {
V0_4_0_alpha_15(Wrapper<v0_4_0_alpha_15::Version>), V0_4_0_alpha_15(Wrapper<v0_4_0_alpha_15::Version>),
V0_4_0_alpha_16(Wrapper<v0_4_0_alpha_16::Version>), V0_4_0_alpha_16(Wrapper<v0_4_0_alpha_16::Version>),
V0_4_0_alpha_17(Wrapper<v0_4_0_alpha_17::Version>), V0_4_0_alpha_17(Wrapper<v0_4_0_alpha_17::Version>),
V0_4_0_alpha_18(Wrapper<v0_4_0_alpha_18::Version>), V0_4_0_alpha_18(Wrapper<v0_4_0_alpha_18::Version>), // VERSION_BUMP
V0_4_0_alpha_19(Wrapper<v0_4_0_alpha_19::Version>), // VERSION_BUMP
Other(exver::Version), Other(exver::Version),
} }
@@ -242,8 +240,7 @@ impl Version {
Self::V0_4_0_alpha_15(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_15(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_16(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_16(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_17(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_17(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_18(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_18(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP
Self::V0_4_0_alpha_19(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP
Self::Other(v) => { Self::Other(v) => {
return Err(Error::new( return Err(Error::new(
eyre!("unknown version {v}"), eyre!("unknown version {v}"),
@@ -296,8 +293,7 @@ impl Version {
Version::V0_4_0_alpha_15(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_15(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_16(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_16(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_17(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_17(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_18(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_18(Wrapper(x)) => x.semver(), // VERSION_BUMP
Version::V0_4_0_alpha_19(Wrapper(x)) => x.semver(), // VERSION_BUMP
Version::Other(x) => x.clone(), Version::Other(x) => x.clone(),
} }
} }

View File

@@ -1,37 +0,0 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{VersionT, v0_4_0_alpha_18};
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_4_0_alpha_19: exver::Version = exver::Version::new(
[0, 4, 0],
[PreReleaseSegment::String("alpha".into()), 19.into()]
);
}
#[derive(Clone, Copy, Debug, Default)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_4_0_alpha_18::Version;
type PreUpRes = ();
async fn pre_up(self) -> Result<Self::PreUpRes, Error> {
Ok(())
}
fn semver(self) -> exver::Version {
V0_4_0_alpha_19.clone()
}
fn compat(self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
#[instrument(skip_all)]
fn up(self, _db: &mut Value, _: Self::PreUpRes) -> Result<Value, Error> {
Ok(Value::Null)
}
fn down(self, _db: &mut Value) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -16,14 +16,14 @@ import {
MountParams, MountParams,
StatusInfo, StatusInfo,
Manifest, Manifest,
} from './osBindings' } from "./osBindings"
import { import {
PackageId, PackageId,
Dependencies, Dependencies,
ServiceInterfaceId, ServiceInterfaceId,
SmtpValue, SmtpValue,
ActionResult, ActionResult,
} from './types' } from "./types"
/** Used to reach out from the pure js runtime */ /** Used to reach out from the pure js runtime */
@@ -155,13 +155,13 @@ export type Effects = {
/** Returns a PEM encoded fullchain for the hostnames specified */ /** Returns a PEM encoded fullchain for the hostnames specified */
getSslCertificate: (options: { getSslCertificate: (options: {
hostnames: string[] hostnames: string[]
algorithm?: 'ecdsa' | 'ed25519' algorithm?: "ecdsa" | "ed25519"
callback?: () => void callback?: () => void
}) => Promise<[string, string, string]> }) => Promise<[string, string, string]>
/** Returns a PEM encoded private key corresponding to the certificate for the hostnames specified */ /** Returns a PEM encoded private key corresponding to the certificate for the hostnames specified */
getSslKey: (options: { getSslKey: (options: {
hostnames: string[] hostnames: string[]
algorithm?: 'ecdsa' | 'ed25519' algorithm?: "ecdsa" | "ed25519"
}) => Promise<string> }) => Promise<string>
/** sets the version that this service's data has been migrated to */ /** sets the version that this service's data has been migrated to */

View File

@@ -1,7 +1,7 @@
import * as T from '../types' import * as T from "../types"
import * as IST from '../actions/input/inputSpecTypes' import * as IST from "../actions/input/inputSpecTypes"
import { Action, ActionInfo } from './setupActions' import { Action, ActionInfo } from "./setupActions"
import { ExtractInputSpecType } from './input/builder/inputSpec' import { ExtractInputSpecType } from "./input/builder/inputSpec"
export type RunActionInput<Input> = export type RunActionInput<Input> =
| Input | Input
@@ -53,17 +53,17 @@ type TaskBase = {
replayId?: string replayId?: string
} }
type TaskInput<T extends ActionInfo<T.ActionId, any>> = { type TaskInput<T extends ActionInfo<T.ActionId, any>> = {
kind: 'partial' kind: "partial"
value: T.DeepPartial<GetActionInputType<T>> value: T.DeepPartial<GetActionInputType<T>>
} }
export type TaskOptions<T extends ActionInfo<T.ActionId, any>> = TaskBase & export type TaskOptions<T extends ActionInfo<T.ActionId, any>> = TaskBase &
( (
| { | {
when?: Exclude<T.TaskTrigger, { condition: 'input-not-matches' }> when?: Exclude<T.TaskTrigger, { condition: "input-not-matches" }>
input?: TaskInput<T> input?: TaskInput<T>
} }
| { | {
when: T.TaskTrigger & { condition: 'input-not-matches' } when: T.TaskTrigger & { condition: "input-not-matches" }
input: TaskInput<T> input: TaskInput<T>
} }
) )

View File

@@ -1,6 +1,6 @@
import { InputSpec } from './inputSpec' import { InputSpec } from "./inputSpec"
import { List } from './list' import { List } from "./list"
import { Value } from './value' import { Value } from "./value"
import { Variants } from './variants' import { Variants } from "./variants"
export { InputSpec as InputSpec, List, Value, Variants } export { InputSpec as InputSpec, List, Value, Variants }

View File

@@ -1,9 +1,9 @@
import { ValueSpec } from '../inputSpecTypes' import { ValueSpec } from "../inputSpecTypes"
import { Value } from './value' import { Value } from "./value"
import { _ } from '../../../util' import { _ } from "../../../util"
import { Effects } from '../../../Effects' import { Effects } from "../../../Effects"
import { Parser, object } from 'ts-matches' import { Parser, object } from "ts-matches"
import { DeepPartial } from '../../../types' import { DeepPartial } from "../../../types"
export type LazyBuildOptions = { export type LazyBuildOptions = {
effects: Effects effects: Effects

View File

@@ -1,4 +1,4 @@
import { InputSpec, LazyBuild } from './inputSpec' import { InputSpec, LazyBuild } from "./inputSpec"
import { import {
ListValueSpecText, ListValueSpecText,
Pattern, Pattern,
@@ -6,8 +6,8 @@ import {
UniqueBy, UniqueBy,
ValueSpecList, ValueSpecList,
ValueSpecListOf, ValueSpecListOf,
} from '../inputSpecTypes' } from "../inputSpecTypes"
import { Parser, arrayOf, string } from 'ts-matches' import { Parser, arrayOf, string } from "ts-matches"
export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> { export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
private constructor( private constructor(
@@ -55,7 +55,7 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
* @description Informs the browser how to behave and which keyboard to display on mobile * @description Informs the browser how to behave and which keyboard to display on mobile
* @default "text" * @default "text"
*/ */
inputmode?: ListValueSpecText['inputmode'] inputmode?: ListValueSpecText["inputmode"]
/** /**
* @description Displays a button that will generate a random string according to the provided charset and len attributes. * @description Displays a button that will generate a random string according to the provided charset and len attributes.
*/ */
@@ -65,21 +65,21 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const validator = arrayOf(string) const validator = arrayOf(string)
return new List<string[]>(() => { return new List<string[]>(() => {
const spec = { const spec = {
type: 'text' as const, type: "text" as const,
placeholder: null, placeholder: null,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
masked: false, masked: false,
inputmode: 'text' as const, inputmode: "text" as const,
generate: null, generate: null,
patterns: aSpec.patterns || [], patterns: aSpec.patterns || [],
...aSpec, ...aSpec,
} }
const built: ValueSpecListOf<'text'> = { const built: ValueSpecListOf<"text"> = {
description: null, description: null,
warning: null, warning: null,
default: [], default: [],
type: 'list' as const, type: "list" as const,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
disabled: false, disabled: false,
@@ -106,7 +106,7 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minLength?: number | null minLength?: number | null
maxLength?: number | null maxLength?: number | null
patterns?: Pattern[] patterns?: Pattern[]
inputmode?: ListValueSpecText['inputmode'] inputmode?: ListValueSpecText["inputmode"]
} }
}>, }>,
) { ) {
@@ -114,21 +114,21 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new List<string[]>(async (options) => { return new List<string[]>(async (options) => {
const { spec: aSpec, ...a } = await getA(options) const { spec: aSpec, ...a } = await getA(options)
const spec = { const spec = {
type: 'text' as const, type: "text" as const,
placeholder: null, placeholder: null,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
masked: false, masked: false,
inputmode: 'text' as const, inputmode: "text" as const,
generate: null, generate: null,
patterns: aSpec.patterns || [], patterns: aSpec.patterns || [],
...aSpec, ...aSpec,
} }
const built: ValueSpecListOf<'text'> = { const built: ValueSpecListOf<"text"> = {
description: null, description: null,
warning: null, warning: null,
default: [], default: [],
type: 'list' as const, type: "list" as const,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
disabled: false, disabled: false,
@@ -162,7 +162,7 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const { spec: previousSpecSpec, ...restSpec } = aSpec const { spec: previousSpecSpec, ...restSpec } = aSpec
const built = await previousSpecSpec.build(options) const built = await previousSpecSpec.build(options)
const spec = { const spec = {
type: 'object' as const, type: "object" as const,
displayAs: null, displayAs: null,
uniqueBy: null, uniqueBy: null,
...restSpec, ...restSpec,
@@ -179,7 +179,7 @@ export class List<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
warning: null, warning: null,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
type: 'list' as const, type: "list" as const,
disabled: false, disabled: false,
...value, ...value,
}, },

View File

@@ -1,6 +1,6 @@
import { InputSpec, LazyBuild } from './inputSpec' import { InputSpec, LazyBuild } from "./inputSpec"
import { List } from './list' import { List } from "./list"
import { UnionRes, UnionResStaticValidatedAs, Variants } from './variants' import { UnionRes, UnionResStaticValidatedAs, Variants } from "./variants"
import { import {
Pattern, Pattern,
RandomString, RandomString,
@@ -9,9 +9,9 @@ import {
ValueSpecHidden, ValueSpecHidden,
ValueSpecText, ValueSpecText,
ValueSpecTextarea, ValueSpecTextarea,
} from '../inputSpecTypes' } from "../inputSpecTypes"
import { DefaultString } from '../inputSpecTypes' import { DefaultString } from "../inputSpecTypes"
import { _, once } from '../../../util' import { _, once } from "../../../util"
import { import {
Parser, Parser,
any, any,
@@ -23,8 +23,8 @@ import {
number, number,
object, object,
string, string,
} from 'ts-matches' } from "ts-matches"
import { DeepPartial } from '../../../types' import { DeepPartial } from "../../../types"
export const fileInfoParser = object({ export const fileInfoParser = object({
path: string, path: string,
@@ -42,7 +42,7 @@ const testForAsRequiredParser = once(
function asRequiredParser<Type, Input extends { required: boolean }>( function asRequiredParser<Type, Input extends { required: boolean }>(
parser: Parser<unknown, Type>, parser: Parser<unknown, Type>,
input: Input, input: Input,
): Parser<unknown, AsRequired<Type, Input['required']>> { ): Parser<unknown, AsRequired<Type, Input["required"]>> {
if (testForAsRequiredParser()(input)) return parser as any if (testForAsRequiredParser()(input)) return parser as any
return parser.nullable() as any return parser.nullable() as any
} }
@@ -92,7 +92,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
spec: { spec: {
description: null, description: null,
warning: null, warning: null,
type: 'toggle' as const, type: "toggle" as const,
disabled: false, disabled: false,
immutable: a.immutable ?? false, immutable: a.immutable ?? false,
...a, ...a,
@@ -117,7 +117,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
spec: { spec: {
description: null, description: null,
warning: null, warning: null,
type: 'toggle' as const, type: "toggle" as const,
disabled: false, disabled: false,
immutable: false, immutable: false,
...(await a(options)), ...(await a(options)),
@@ -191,7 +191,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
* @description Informs the browser how to behave and which keyboard to display on mobile * @description Informs the browser how to behave and which keyboard to display on mobile
* @default "text" * @default "text"
*/ */
inputmode?: ValueSpecText['inputmode'] inputmode?: ValueSpecText["inputmode"]
/** /**
* @description Once set, the value can never be changed. * @description Once set, the value can never be changed.
* @default false * @default false
@@ -206,7 +206,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<string, Required>>( return new Value<AsRequired<string, Required>>(
async () => ({ async () => ({
spec: { spec: {
type: 'text' as const, type: "text" as const,
description: null, description: null,
warning: null, warning: null,
masked: false, masked: false,
@@ -214,7 +214,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minLength: null, minLength: null,
maxLength: null, maxLength: null,
patterns: [], patterns: [],
inputmode: 'text', inputmode: "text",
disabled: false, disabled: false,
immutable: a.immutable ?? false, immutable: a.immutable ?? false,
generate: a.generate ?? null, generate: a.generate ?? null,
@@ -237,7 +237,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minLength?: number | null minLength?: number | null
maxLength?: number | null maxLength?: number | null
patterns?: Pattern[] patterns?: Pattern[]
inputmode?: ValueSpecText['inputmode'] inputmode?: ValueSpecText["inputmode"]
disabled?: string | false disabled?: string | false
generate?: null | RandomString generate?: null | RandomString
}>, }>,
@@ -247,7 +247,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: 'text' as const, type: "text" as const,
description: null, description: null,
warning: null, warning: null,
masked: false, masked: false,
@@ -255,7 +255,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minLength: null, minLength: null,
maxLength: null, maxLength: null,
patterns: [], patterns: [],
inputmode: 'text', inputmode: "text",
disabled: false, disabled: false,
immutable: false, immutable: false,
generate: a.generate ?? null, generate: a.generate ?? null,
@@ -334,7 +334,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minRows: 3, minRows: 3,
maxRows: 6, maxRows: 6,
placeholder: null, placeholder: null,
type: 'textarea' as const, type: "textarea" as const,
disabled: false, disabled: false,
immutable: a.immutable ?? false, immutable: a.immutable ?? false,
...a, ...a,
@@ -371,7 +371,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
minRows: 3, minRows: 3,
maxRows: 6, maxRows: 6,
placeholder: null, placeholder: null,
type: 'textarea' as const, type: "textarea" as const,
disabled: false, disabled: false,
immutable: false, immutable: false,
...a, ...a,
@@ -444,7 +444,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<number, Required>>( return new Value<AsRequired<number, Required>>(
() => ({ () => ({
spec: { spec: {
type: 'number' as const, type: "number" as const,
description: null, description: null,
warning: null, warning: null,
min: null, min: null,
@@ -482,7 +482,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: 'number' as const, type: "number" as const,
description: null, description: null,
warning: null, warning: null,
min: null, min: null,
@@ -540,7 +540,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<string, Required>>( return new Value<AsRequired<string, Required>>(
() => ({ () => ({
spec: { spec: {
type: 'color' as const, type: "color" as const,
description: null, description: null,
warning: null, warning: null,
disabled: false, disabled: false,
@@ -568,7 +568,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: 'color' as const, type: "color" as const,
description: null, description: null,
warning: null, warning: null,
disabled: false, disabled: false,
@@ -618,7 +618,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
* @description Informs the browser how to behave and which date/time component to display. * @description Informs the browser how to behave and which date/time component to display.
* @default "datetime-local" * @default "datetime-local"
*/ */
inputmode?: ValueSpecDatetime['inputmode'] inputmode?: ValueSpecDatetime["inputmode"]
min?: string | null min?: string | null
max?: string | null max?: string | null
/** /**
@@ -631,10 +631,10 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<string, Required>>( return new Value<AsRequired<string, Required>>(
() => ({ () => ({
spec: { spec: {
type: 'datetime' as const, type: "datetime" as const,
description: null, description: null,
warning: null, warning: null,
inputmode: 'datetime-local', inputmode: "datetime-local",
min: null, min: null,
max: null, max: null,
step: null, step: null,
@@ -654,7 +654,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
warning?: string | null warning?: string | null
default: string | null default: string | null
required: Required required: Required
inputmode?: ValueSpecDatetime['inputmode'] inputmode?: ValueSpecDatetime["inputmode"]
min?: string | null min?: string | null
max?: string | null max?: string | null
disabled?: false | string disabled?: false | string
@@ -665,10 +665,10 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: 'datetime' as const, type: "datetime" as const,
description: null, description: null,
warning: null, warning: null,
inputmode: 'datetime-local', inputmode: "datetime-local",
min: null, min: null,
max: null, max: null,
disabled: false, disabled: false,
@@ -740,7 +740,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
spec: { spec: {
description: null, description: null,
warning: null, warning: null,
type: 'select' as const, type: "select" as const,
disabled: false, disabled: false,
immutable: a.immutable ?? false, immutable: a.immutable ?? false,
...a, ...a,
@@ -766,7 +766,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
spec: { spec: {
description: null, description: null,
warning: null, warning: null,
type: 'select' as const, type: "select" as const,
disabled: false, disabled: false,
immutable: false, immutable: false,
...a, ...a,
@@ -837,7 +837,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<(keyof Values & string)[]>( return new Value<(keyof Values & string)[]>(
() => ({ () => ({
spec: { spec: {
type: 'multiselect' as const, type: "multiselect" as const,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
warning: null, warning: null,
@@ -867,7 +867,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const a = await getA(options) const a = await getA(options)
return { return {
spec: { spec: {
type: 'multiselect' as const, type: "multiselect" as const,
minLength: null, minLength: null,
maxLength: null, maxLength: null,
warning: null, warning: null,
@@ -915,7 +915,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const built = await spec.build(options as any) const built = await spec.build(options as any)
return { return {
spec: { spec: {
type: 'object' as const, type: "object" as const,
description: null, description: null,
warning: null, warning: null,
...a, ...a,
@@ -933,7 +933,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
required: Required required: Required
}) { }) {
const buildValue = { const buildValue = {
type: 'file' as const, type: "file" as const,
description: null, description: null,
warning: null, warning: null,
...a, ...a,
@@ -960,7 +960,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<AsRequired<FileInfo, Required>, FileInfo | null>( return new Value<AsRequired<FileInfo, Required>, FileInfo | null>(
async (options) => { async (options) => {
const spec = { const spec = {
type: 'file' as const, type: "file" as const,
description: null, description: null,
warning: null, warning: null,
...(await a(options)), ...(await a(options)),
@@ -1034,7 +1034,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const built = await a.variants.build(options as any) const built = await a.variants.build(options as any)
return { return {
spec: { spec: {
type: 'union' as const, type: "union" as const,
description: null, description: null,
warning: null, warning: null,
disabled: false, disabled: false,
@@ -1109,7 +1109,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const built = await newValues.variants.build(options as any) const built = await newValues.variants.build(options as any)
return { return {
spec: { spec: {
type: 'union' as const, type: "union" as const,
description: null, description: null,
warning: null, warning: null,
...newValues, ...newValues,
@@ -1202,7 +1202,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
return new Value<T, typeof parser._TYPE>(async () => { return new Value<T, typeof parser._TYPE>(async () => {
return { return {
spec: { spec: {
type: 'hidden' as const, type: "hidden" as const,
} as ValueSpecHidden, } as ValueSpecHidden,
validator: parser, validator: parser,
} }
@@ -1221,7 +1221,7 @@ export class Value<Type extends StaticValidatedAs, StaticValidatedAs = Type> {
const validator = await getParser(options) const validator = await getParser(options)
return { return {
spec: { spec: {
type: 'hidden' as const, type: "hidden" as const,
} as ValueSpecHidden, } as ValueSpecHidden,
validator, validator,
} }

View File

@@ -1,12 +1,12 @@
import { DeepPartial } from '../../../types' import { DeepPartial } from "../../../types"
import { ValueSpec, ValueSpecUnion } from '../inputSpecTypes' import { ValueSpec, ValueSpecUnion } from "../inputSpecTypes"
import { import {
LazyBuild, LazyBuild,
InputSpec, InputSpec,
ExtractInputSpecType, ExtractInputSpecType,
ExtractInputSpecStaticValidatedAs, ExtractInputSpecStaticValidatedAs,
} from './inputSpec' } from "./inputSpec"
import { Parser, any, anyOf, literal, object } from 'ts-matches' import { Parser, any, anyOf, literal, object } from "ts-matches"
export type UnionRes< export type UnionRes<
VariantValues extends { VariantValues extends {
@@ -19,10 +19,10 @@ export type UnionRes<
> = { > = {
[key in keyof VariantValues]: { [key in keyof VariantValues]: {
selection: key selection: key
value: ExtractInputSpecType<VariantValues[key]['spec']> value: ExtractInputSpecType<VariantValues[key]["spec"]>
other?: { other?: {
[key2 in Exclude<keyof VariantValues & string, key>]?: DeepPartial< [key2 in Exclude<keyof VariantValues & string, key>]?: DeepPartial<
ExtractInputSpecType<VariantValues[key2]['spec']> ExtractInputSpecType<VariantValues[key2]["spec"]>
> >
} }
} }
@@ -39,10 +39,10 @@ export type UnionResStaticValidatedAs<
> = { > = {
[key in keyof VariantValues]: { [key in keyof VariantValues]: {
selection: key selection: key
value: ExtractInputSpecStaticValidatedAs<VariantValues[key]['spec']> value: ExtractInputSpecStaticValidatedAs<VariantValues[key]["spec"]>
other?: { other?: {
[key2 in Exclude<keyof VariantValues & string, key>]?: DeepPartial< [key2 in Exclude<keyof VariantValues & string, key>]?: DeepPartial<
ExtractInputSpecStaticValidatedAs<VariantValues[key2]['spec']> ExtractInputSpecStaticValidatedAs<VariantValues[key2]["spec"]>
> >
} }
} }
@@ -106,7 +106,7 @@ export class Variants<
> { > {
private constructor( private constructor(
public build: LazyBuild<{ public build: LazyBuild<{
spec: ValueSpecUnion['variants'] spec: ValueSpecUnion["variants"]
validator: Parser<unknown, UnionRes<VariantValues>> validator: Parser<unknown, UnionRes<VariantValues>>
}>, }>,
public readonly validator: Parser< public readonly validator: Parser<
@@ -126,7 +126,7 @@ export class Variants<
const staticValidators = {} as { const staticValidators = {} as {
[K in keyof VariantValues]: Parser< [K in keyof VariantValues]: Parser<
unknown, unknown,
ExtractInputSpecStaticValidatedAs<VariantValues[K]['spec']> ExtractInputSpecStaticValidatedAs<VariantValues[K]["spec"]>
> >
} }
for (const key in a) { for (const key in a) {
@@ -143,7 +143,7 @@ export class Variants<
const validators = {} as { const validators = {} as {
[K in keyof VariantValues]: Parser< [K in keyof VariantValues]: Parser<
unknown, unknown,
ExtractInputSpecType<VariantValues[K]['spec']> ExtractInputSpecType<VariantValues[K]["spec"]>
> >
} }
const variants = {} as { const variants = {} as {

View File

@@ -1,3 +1,3 @@
export * as constants from './inputSpecConstants' export * as constants from "./inputSpecConstants"
export * as types from './inputSpecTypes' export * as types from "./inputSpecTypes"
export * as builder from './builder' export * as builder from "./builder"

View File

@@ -1,8 +1,8 @@
import { SmtpValue } from '../../types' import { SmtpValue } from "../../types"
import { GetSystemSmtp, Patterns } from '../../util' import { GetSystemSmtp, Patterns } from "../../util"
import { InputSpec, InputSpecOf } from './builder/inputSpec' import { InputSpec, InputSpecOf } from "./builder/inputSpec"
import { Value } from './builder/value' import { Value } from "./builder/value"
import { Variants } from './builder/variants' import { Variants } from "./builder/variants"
/** /**
* Base SMTP settings, to be used by StartOS for system wide SMTP * Base SMTP settings, to be used by StartOS for system wide SMTP
@@ -11,12 +11,12 @@ export const customSmtp: InputSpec<SmtpValue> = InputSpec.of<
InputSpecOf<SmtpValue> InputSpecOf<SmtpValue>
>({ >({
server: Value.text({ server: Value.text({
name: 'SMTP Server', name: "SMTP Server",
required: true, required: true,
default: null, default: null,
}), }),
port: Value.number({ port: Value.number({
name: 'Port', name: "Port",
required: true, required: true,
default: 587, default: 587,
min: 1, min: 1,
@@ -24,20 +24,20 @@ export const customSmtp: InputSpec<SmtpValue> = InputSpec.of<
integer: true, integer: true,
}), }),
from: Value.text({ from: Value.text({
name: 'From Address', name: "From Address",
required: true, required: true,
default: null, default: null,
placeholder: 'Example Name <test@example.com>', placeholder: "Example Name <test@example.com>",
inputmode: 'email', inputmode: "email",
patterns: [Patterns.emailWithName], patterns: [Patterns.emailWithName],
}), }),
login: Value.text({ login: Value.text({
name: 'Login', name: "Login",
required: true, required: true,
default: null, default: null,
}), }),
password: Value.text({ password: Value.text({
name: 'Password', name: "Password",
required: false, required: false,
default: null, default: null,
masked: true, masked: true,
@@ -45,24 +45,24 @@ export const customSmtp: InputSpec<SmtpValue> = InputSpec.of<
}) })
const smtpVariants = Variants.of({ const smtpVariants = Variants.of({
disabled: { name: 'Disabled', spec: InputSpec.of({}) }, disabled: { name: "Disabled", spec: InputSpec.of({}) },
system: { system: {
name: 'System Credentials', name: "System Credentials",
spec: InputSpec.of({ spec: InputSpec.of({
customFrom: Value.text({ customFrom: Value.text({
name: 'Custom From Address', name: "Custom From Address",
description: description:
'A custom from address for this service. If not provided, the system from address will be used.', "A custom from address for this service. If not provided, the system from address will be used.",
required: false, required: false,
default: null, default: null,
placeholder: '<name>test@example.com', placeholder: "<name>test@example.com",
inputmode: 'email', inputmode: "email",
patterns: [Patterns.email], patterns: [Patterns.email],
}), }),
}), }),
}, },
custom: { custom: {
name: 'Custom Credentials', name: "Custom Credentials",
spec: customSmtp, spec: customSmtp,
}, },
}) })
@@ -71,11 +71,11 @@ const smtpVariants = Variants.of({
*/ */
export const smtpInputSpec = Value.dynamicUnion(async ({ effects }) => { export const smtpInputSpec = Value.dynamicUnion(async ({ effects }) => {
const smtp = await new GetSystemSmtp(effects).once() const smtp = await new GetSystemSmtp(effects).once()
const disabled = smtp ? [] : ['system'] const disabled = smtp ? [] : ["system"]
return { return {
name: 'SMTP', name: "SMTP",
description: 'Optionally provide an SMTP server for sending emails', description: "Optionally provide an SMTP server for sending emails",
default: 'disabled', default: "disabled",
disabled, disabled,
variants: smtpVariants, variants: smtpVariants,
} }

View File

@@ -1,18 +1,18 @@
export type InputSpec = Record<string, ValueSpec> export type InputSpec = Record<string, ValueSpec>
export type ValueType = export type ValueType =
| 'text' | "text"
| 'textarea' | "textarea"
| 'number' | "number"
| 'color' | "color"
| 'datetime' | "datetime"
| 'toggle' | "toggle"
| 'select' | "select"
| 'multiselect' | "multiselect"
| 'list' | "list"
| 'object' | "object"
| 'file' | "file"
| 'union' | "union"
| 'hidden' | "hidden"
export type ValueSpec = ValueSpecOf<ValueType> export type ValueSpec = ValueSpecOf<ValueType>
/** core spec types. These types provide the metadata for performing validations */ /** core spec types. These types provide the metadata for performing validations */
// prettier-ignore // prettier-ignore
@@ -37,13 +37,13 @@ export type ValueSpecText = {
description: string | null description: string | null
warning: string | null warning: string | null
type: 'text' type: "text"
patterns: Pattern[] patterns: Pattern[]
minLength: number | null minLength: number | null
maxLength: number | null maxLength: number | null
masked: boolean masked: boolean
inputmode: 'text' | 'email' | 'tel' | 'url' inputmode: "text" | "email" | "tel" | "url"
placeholder: string | null placeholder: string | null
required: boolean required: boolean
@@ -57,7 +57,7 @@ export type ValueSpecTextarea = {
description: string | null description: string | null
warning: string | null warning: string | null
type: 'textarea' type: "textarea"
patterns: Pattern[] patterns: Pattern[]
placeholder: string | null placeholder: string | null
minLength: number | null minLength: number | null
@@ -71,7 +71,7 @@ export type ValueSpecTextarea = {
} }
export type ValueSpecNumber = { export type ValueSpecNumber = {
type: 'number' type: "number"
min: number | null min: number | null
max: number | null max: number | null
integer: boolean integer: boolean
@@ -91,7 +91,7 @@ export type ValueSpecColor = {
description: string | null description: string | null
warning: string | null warning: string | null
type: 'color' type: "color"
required: boolean required: boolean
default: string | null default: string | null
disabled: false | string disabled: false | string
@@ -101,9 +101,9 @@ export type ValueSpecDatetime = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: 'datetime' type: "datetime"
required: boolean required: boolean
inputmode: 'date' | 'time' | 'datetime-local' inputmode: "date" | "time" | "datetime-local"
min: string | null min: string | null
max: string | null max: string | null
default: string | null default: string | null
@@ -115,7 +115,7 @@ export type ValueSpecSelect = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: 'select' type: "select"
default: string | null default: string | null
disabled: false | string | string[] disabled: false | string | string[]
immutable: boolean immutable: boolean
@@ -127,7 +127,7 @@ export type ValueSpecMultiselect = {
description: string | null description: string | null
warning: string | null warning: string | null
type: 'multiselect' type: "multiselect"
minLength: number | null minLength: number | null
maxLength: number | null maxLength: number | null
disabled: false | string | string[] disabled: false | string | string[]
@@ -139,7 +139,7 @@ export type ValueSpecToggle = {
description: string | null description: string | null
warning: string | null warning: string | null
type: 'toggle' type: "toggle"
default: boolean | null default: boolean | null
disabled: false | string disabled: false | string
immutable: boolean immutable: boolean
@@ -149,7 +149,7 @@ export type ValueSpecUnion = {
description: string | null description: string | null
warning: string | null warning: string | null
type: 'union' type: "union"
variants: Record< variants: Record<
string, string,
{ {
@@ -165,7 +165,7 @@ export type ValueSpecFile = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: 'file' type: "file"
extensions: string[] extensions: string[]
required: boolean required: boolean
} }
@@ -173,13 +173,13 @@ export type ValueSpecObject = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: 'object' type: "object"
spec: InputSpec spec: InputSpec
} }
export type ValueSpecHidden = { export type ValueSpecHidden = {
type: 'hidden' type: "hidden"
} }
export type ListValueSpecType = 'text' | 'object' export type ListValueSpecType = "text" | "object"
// prettier-ignore // prettier-ignore
export type ListValueSpecOf<T extends ListValueSpecType> = export type ListValueSpecOf<T extends ListValueSpecType> =
T extends "text" ? ListValueSpecText : T extends "text" ? ListValueSpecText :
@@ -190,7 +190,7 @@ export type ValueSpecListOf<T extends ListValueSpecType> = {
name: string name: string
description: string | null description: string | null
warning: string | null warning: string | null
type: 'list' type: "list"
spec: ListValueSpecOf<T> spec: ListValueSpecOf<T>
minLength: number | null minLength: number | null
maxLength: number | null maxLength: number | null
@@ -208,18 +208,18 @@ export type Pattern = {
description: string description: string
} }
export type ListValueSpecText = { export type ListValueSpecText = {
type: 'text' type: "text"
patterns: Pattern[] patterns: Pattern[]
minLength: number | null minLength: number | null
maxLength: number | null maxLength: number | null
masked: boolean masked: boolean
generate: null | RandomString generate: null | RandomString
inputmode: 'text' | 'email' | 'tel' | 'url' inputmode: "text" | "email" | "tel" | "url"
placeholder: string | null placeholder: string | null
} }
export type ListValueSpecObject = { export type ListValueSpecObject = {
type: 'object' type: "object"
spec: InputSpec spec: InputSpec
uniqueBy: UniqueBy uniqueBy: UniqueBy
displayAs: string | null displayAs: string | null
@@ -244,5 +244,5 @@ export function isValueSpecListOf<S extends ListValueSpecType>(
t: ValueSpec, t: ValueSpec,
s: S, s: S,
): t is ValueSpecListOf<S> & { spec: ListValueSpecOf<S> } { ): t is ValueSpecListOf<S> & { spec: ListValueSpecOf<S> } {
return 'spec' in t && t.spec.type === s return "spec" in t && t.spec.type === s
} }

View File

@@ -1,16 +1,16 @@
import { InputSpec } from './input/builder' import { InputSpec } from "./input/builder"
import { ExtractInputSpecType } from './input/builder/inputSpec' import { ExtractInputSpecType } from "./input/builder/inputSpec"
import * as T from '../types' import * as T from "../types"
import { once } from '../util' import { once } from "../util"
import { InitScript } from '../inits' import { InitScript } from "../inits"
import { Parser } from 'ts-matches' import { Parser } from "ts-matches"
type MaybeInputSpec<Type> = {} extends Type ? null : InputSpec<Type> type MaybeInputSpec<Type> = {} extends Type ? null : InputSpec<Type>
export type Run<A extends Record<string, any>> = (options: { export type Run<A extends Record<string, any>> = (options: {
effects: T.Effects effects: T.Effects
input: A input: A
spec: T.inputSpecTypes.InputSpec spec: T.inputSpecTypes.InputSpec
}) => Promise<(T.ActionResult & { version: '1' }) | null | void | undefined> }) => Promise<(T.ActionResult & { version: "1" }) | null | void | undefined>
export type GetInput<A extends Record<string, any>> = (options: { export type GetInput<A extends Record<string, any>> = (options: {
effects: T.Effects effects: T.Effects
}) => Promise<null | void | undefined | T.DeepPartial<A>> }) => Promise<null | void | undefined | T.DeepPartial<A>>
@@ -65,7 +65,7 @@ export class Action<Id extends T.ActionId, Type extends Record<string, any>>
InputSpecType extends InputSpec<Record<string, any>>, InputSpecType extends InputSpec<Record<string, any>>,
>( >(
id: Id, id: Id,
metadata: MaybeFn<Omit<T.ActionMetadata, 'hasInput'>>, metadata: MaybeFn<Omit<T.ActionMetadata, "hasInput">>,
inputSpec: InputSpecType, inputSpec: InputSpecType,
getInput: GetInput<ExtractInputSpecType<InputSpecType>>, getInput: GetInput<ExtractInputSpecType<InputSpecType>>,
run: Run<ExtractInputSpecType<InputSpecType>>, run: Run<ExtractInputSpecType<InputSpecType>>,
@@ -80,7 +80,7 @@ export class Action<Id extends T.ActionId, Type extends Record<string, any>>
} }
static withoutInput<Id extends T.ActionId>( static withoutInput<Id extends T.ActionId>(
id: Id, id: Id,
metadata: MaybeFn<Omit<T.ActionMetadata, 'hasInput'>>, metadata: MaybeFn<Omit<T.ActionMetadata, "hasInput">>,
run: Run<{}>, run: Run<{}>,
): Action<Id, {}> { ): Action<Id, {}> {
return new Action( return new Action(
@@ -156,7 +156,7 @@ export class Actions<
} }
addAction<A extends Action<T.ActionId, any>>( addAction<A extends Action<T.ActionId, any>>(
action: A, // TODO: prevent duplicates action: A, // TODO: prevent duplicates
): Actions<AllActions & { [id in A['id']]: A }> { ): Actions<AllActions & { [id in A["id"]]: A }> {
return new Actions({ ...this.actions, [action.id]: action }) return new Actions({ ...this.actions, [action.id]: action })
} }
async init(effects: T.Effects): Promise<void> { async init(effects: T.Effects): Promise<void> {

View File

@@ -1,11 +1,11 @@
import { ExtendedVersion, VersionRange } from '../exver' import { ExtendedVersion, VersionRange } from "../exver"
import { import {
PackageId, PackageId,
HealthCheckId, HealthCheckId,
DependencyRequirement, DependencyRequirement,
CheckDependenciesResult, CheckDependenciesResult,
} from '../types' } from "../types"
import { Effects } from '../Effects' import { Effects } from "../Effects"
export type CheckDependencies<DependencyId extends PackageId = PackageId> = { export type CheckDependencies<DependencyId extends PackageId = PackageId> = {
infoFor: (packageId: DependencyId) => { infoFor: (packageId: DependencyId) => {
@@ -73,11 +73,11 @@ export async function checkDependencies<
} }
const runningSatisfied = (packageId: DependencyId) => { const runningSatisfied = (packageId: DependencyId) => {
const dep = infoFor(packageId) const dep = infoFor(packageId)
return dep.requirement.kind !== 'running' || dep.result.isRunning return dep.requirement.kind !== "running" || dep.result.isRunning
} }
const tasksSatisfied = (packageId: DependencyId) => const tasksSatisfied = (packageId: DependencyId) =>
Object.entries(infoFor(packageId).result.tasks).filter( Object.entries(infoFor(packageId).result.tasks).filter(
([_, t]) => t?.active && t.task.severity === 'critical', ([_, t]) => t?.active && t.task.severity === "critical",
).length === 0 ).length === 0
const healthCheckSatisfied = ( const healthCheckSatisfied = (
packageId: DependencyId, packageId: DependencyId,
@@ -86,17 +86,17 @@ export async function checkDependencies<
const dep = infoFor(packageId) const dep = infoFor(packageId)
if ( if (
healthCheckId && healthCheckId &&
(dep.requirement.kind !== 'running' || (dep.requirement.kind !== "running" ||
!dep.requirement.healthChecks.includes(healthCheckId)) !dep.requirement.healthChecks.includes(healthCheckId))
) { ) {
throw new Error(`Unknown HealthCheckId ${healthCheckId}`) throw new Error(`Unknown HealthCheckId ${healthCheckId}`)
} }
const errors = const errors =
dep.requirement.kind === 'running' dep.requirement.kind === "running"
? dep.requirement.healthChecks ? dep.requirement.healthChecks
.map((id) => [id, dep.result.healthChecks[id] ?? null] as const) .map((id) => [id, dep.result.healthChecks[id] ?? null] as const)
.filter(([id, _]) => (healthCheckId ? id === healthCheckId : true)) .filter(([id, _]) => (healthCheckId ? id === healthCheckId : true))
.filter(([_, res]) => res?.result !== 'success') .filter(([_, res]) => res?.result !== "success")
: [] : []
return errors.length === 0 return errors.length === 0
} }
@@ -138,7 +138,7 @@ export async function checkDependencies<
} }
const throwIfRunningNotSatisfied = (packageId: DependencyId) => { const throwIfRunningNotSatisfied = (packageId: DependencyId) => {
const dep = infoFor(packageId) const dep = infoFor(packageId)
if (dep.requirement.kind === 'running' && !dep.result.isRunning) { if (dep.requirement.kind === "running" && !dep.result.isRunning) {
throw new Error(`${dep.result.title || packageId} is not running`) throw new Error(`${dep.result.title || packageId} is not running`)
} }
return null return null
@@ -146,11 +146,11 @@ export async function checkDependencies<
const throwIfTasksNotSatisfied = (packageId: DependencyId) => { const throwIfTasksNotSatisfied = (packageId: DependencyId) => {
const dep = infoFor(packageId) const dep = infoFor(packageId)
const reqs = Object.entries(dep.result.tasks) const reqs = Object.entries(dep.result.tasks)
.filter(([_, t]) => t?.active && t.task.severity === 'critical') .filter(([_, t]) => t?.active && t.task.severity === "critical")
.map(([id, _]) => id) .map(([id, _]) => id)
if (reqs.length) { if (reqs.length) {
throw new Error( throw new Error(
`The following action requests have not been fulfilled: ${reqs.join(', ')}`, `The following action requests have not been fulfilled: ${reqs.join(", ")}`,
) )
} }
return null return null
@@ -162,27 +162,27 @@ export async function checkDependencies<
const dep = infoFor(packageId) const dep = infoFor(packageId)
if ( if (
healthCheckId && healthCheckId &&
(dep.requirement.kind !== 'running' || (dep.requirement.kind !== "running" ||
!dep.requirement.healthChecks.includes(healthCheckId)) !dep.requirement.healthChecks.includes(healthCheckId))
) { ) {
throw new Error(`Unknown HealthCheckId ${healthCheckId}`) throw new Error(`Unknown HealthCheckId ${healthCheckId}`)
} }
const errors = const errors =
dep.requirement.kind === 'running' dep.requirement.kind === "running"
? dep.requirement.healthChecks ? dep.requirement.healthChecks
.map((id) => [id, dep.result.healthChecks[id] ?? null] as const) .map((id) => [id, dep.result.healthChecks[id] ?? null] as const)
.filter(([id, _]) => (healthCheckId ? id === healthCheckId : true)) .filter(([id, _]) => (healthCheckId ? id === healthCheckId : true))
.filter(([_, res]) => res?.result !== 'success') .filter(([_, res]) => res?.result !== "success")
: [] : []
if (errors.length) { if (errors.length) {
throw new Error( throw new Error(
errors errors
.map(([id, e]) => .map(([id, e]) =>
e e
? `Health Check ${e.name} of ${dep.result.title || packageId} failed with status ${e.result}${e.message ? `: ${e.message}` : ''}` ? `Health Check ${e.name} of ${dep.result.title || packageId} failed with status ${e.result}${e.message ? `: ${e.message}` : ""}`
: `Health Check ${id} of ${dep.result.title} does not exist`, : `Health Check ${id} of ${dep.result.title} does not exist`,
) )
.join('; '), .join("; "),
) )
} }
return null return null
@@ -209,7 +209,7 @@ export async function checkDependencies<
return [] return []
}) })
if (err.length) { if (err.length) {
throw new Error(err.join('; ')) throw new Error(err.join("; "))
} }
return null return null
})() })()

View File

@@ -1,27 +1,27 @@
import * as T from '../types' import * as T from "../types"
import { once } from '../util' import { once } from "../util"
export type RequiredDependenciesOf<Manifest extends T.SDKManifest> = { export type RequiredDependenciesOf<Manifest extends T.SDKManifest> = {
[K in keyof Manifest['dependencies']]: Exclude< [K in keyof Manifest["dependencies"]]: Exclude<
Manifest['dependencies'][K], Manifest["dependencies"][K],
undefined undefined
>['optional'] extends false >["optional"] extends false
? K ? K
: never : never
}[keyof Manifest['dependencies']] }[keyof Manifest["dependencies"]]
export type OptionalDependenciesOf<Manifest extends T.SDKManifest> = Exclude< export type OptionalDependenciesOf<Manifest extends T.SDKManifest> = Exclude<
keyof Manifest['dependencies'], keyof Manifest["dependencies"],
RequiredDependenciesOf<Manifest> RequiredDependenciesOf<Manifest>
> >
type DependencyRequirement = type DependencyRequirement =
| { | {
kind: 'running' kind: "running"
healthChecks: Array<T.HealthCheckId> healthChecks: Array<T.HealthCheckId>
versionRange: string versionRange: string
} }
| { | {
kind: 'exists' kind: "exists"
versionRange: string versionRange: string
} }
type Matches<T, U> = T extends U ? (U extends T ? null : never) : never type Matches<T, U> = T extends U ? (U extends T ? null : never) : never

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
import { DeepMap } from 'deep-equality-data-structures' import { DeepMap } from "deep-equality-data-structures"
import * as P from './exver' import * as P from "./exver"
// prettier-ignore // prettier-ignore
export type ValidateVersion<T extends String> = export type ValidateVersion<T extends String> =
@@ -22,35 +22,35 @@ export type ValidateExVers<T> =
never[] never[]
type Anchor = { type Anchor = {
type: 'Anchor' type: "Anchor"
operator: P.CmpOp operator: P.CmpOp
version: ExtendedVersion version: ExtendedVersion
} }
type And = { type And = {
type: 'And' type: "And"
left: VersionRange left: VersionRange
right: VersionRange right: VersionRange
} }
type Or = { type Or = {
type: 'Or' type: "Or"
left: VersionRange left: VersionRange
right: VersionRange right: VersionRange
} }
type Not = { type Not = {
type: 'Not' type: "Not"
value: VersionRange value: VersionRange
} }
type Flavor = { type Flavor = {
type: 'Flavor' type: "Flavor"
flavor: string | null flavor: string | null
} }
type FlavorNot = { type FlavorNot = {
type: 'FlavorNot' type: "FlavorNot"
flavors: Set<string | null> flavors: Set<string | null>
} }
@@ -107,8 +107,8 @@ function adjacentVersionRangePoints(
} }
function flavorAnd(a: FlavorAtom, b: FlavorAtom): FlavorAtom | null { function flavorAnd(a: FlavorAtom, b: FlavorAtom): FlavorAtom | null {
if (a.type == 'Flavor') { if (a.type == "Flavor") {
if (b.type == 'Flavor') { if (b.type == "Flavor") {
if (a.flavor == b.flavor) { if (a.flavor == b.flavor) {
return a return a
} else { } else {
@@ -122,7 +122,7 @@ function flavorAnd(a: FlavorAtom, b: FlavorAtom): FlavorAtom | null {
} }
} }
} else { } else {
if (b.type == 'Flavor') { if (b.type == "Flavor") {
if (a.flavors.has(b.flavor)) { if (a.flavors.has(b.flavor)) {
return null return null
} else { } else {
@@ -131,7 +131,7 @@ function flavorAnd(a: FlavorAtom, b: FlavorAtom): FlavorAtom | null {
} else { } else {
// TODO: use Set.union if targeting esnext or later // TODO: use Set.union if targeting esnext or later
return { return {
type: 'FlavorNot', type: "FlavorNot",
flavors: new Set([...a.flavors, ...b.flavors]), flavors: new Set([...a.flavors, ...b.flavors]),
} }
} }
@@ -218,12 +218,12 @@ class VersionRangeTable {
static eqFlavor(flavor: string | null): VersionRangeTables { static eqFlavor(flavor: string | null): VersionRangeTables {
return new DeepMap([ return new DeepMap([
[ [
{ type: 'Flavor', flavor } as FlavorAtom, { type: "Flavor", flavor } as FlavorAtom,
new VersionRangeTable([], [true]), new VersionRangeTable([], [true]),
], ],
// make sure the truth table is exhaustive, or `not` will not work properly. // make sure the truth table is exhaustive, or `not` will not work properly.
[ [
{ type: 'FlavorNot', flavors: new Set([flavor]) } as FlavorAtom, { type: "FlavorNot", flavors: new Set([flavor]) } as FlavorAtom,
new VersionRangeTable([], [false]), new VersionRangeTable([], [false]),
], ],
]) ])
@@ -241,12 +241,12 @@ class VersionRangeTable {
): VersionRangeTables { ): VersionRangeTables {
return new DeepMap([ return new DeepMap([
[ [
{ type: 'Flavor', flavor } as FlavorAtom, { type: "Flavor", flavor } as FlavorAtom,
new VersionRangeTable([point], [left, right]), new VersionRangeTable([point], [left, right]),
], ],
// make sure the truth table is exhaustive, or `not` will not work properly. // make sure the truth table is exhaustive, or `not` will not work properly.
[ [
{ type: 'FlavorNot', flavors: new Set([flavor]) } as FlavorAtom, { type: "FlavorNot", flavors: new Set([flavor]) } as FlavorAtom,
new VersionRangeTable([], [false]), new VersionRangeTable([], [false]),
], ],
]) ])
@@ -383,7 +383,7 @@ class VersionRangeTable {
let sum_terms: VersionRange[] = [] let sum_terms: VersionRange[] = []
for (let [flavor, table] of tables) { for (let [flavor, table] of tables) {
let cmp_flavor = null let cmp_flavor = null
if (flavor.type == 'Flavor') { if (flavor.type == "Flavor") {
cmp_flavor = flavor.flavor cmp_flavor = flavor.flavor
} }
for (let i = 0; i < table.values.length; i++) { for (let i = 0; i < table.values.length; i++) {
@@ -392,7 +392,7 @@ class VersionRangeTable {
continue continue
} }
if (flavor.type == 'FlavorNot') { if (flavor.type == "FlavorNot") {
for (let not_flavor of flavor.flavors) { for (let not_flavor of flavor.flavors) {
term.push(VersionRange.flavor(not_flavor).not()) term.push(VersionRange.flavor(not_flavor).not())
} }
@@ -410,7 +410,7 @@ class VersionRangeTable {
if (p != null && q != null && adjacentVersionRangePoints(p, q)) { if (p != null && q != null && adjacentVersionRangePoints(p, q)) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
'=', "=",
new ExtendedVersion(cmp_flavor, p.upstream, p.downstream), new ExtendedVersion(cmp_flavor, p.upstream, p.downstream),
), ),
) )
@@ -418,7 +418,7 @@ class VersionRangeTable {
if (p != null && p.side < 0) { if (p != null && p.side < 0) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
'>=', ">=",
new ExtendedVersion(cmp_flavor, p.upstream, p.downstream), new ExtendedVersion(cmp_flavor, p.upstream, p.downstream),
), ),
) )
@@ -426,7 +426,7 @@ class VersionRangeTable {
if (p != null && p.side >= 0) { if (p != null && p.side >= 0) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
'>', ">",
new ExtendedVersion(cmp_flavor, p.upstream, p.downstream), new ExtendedVersion(cmp_flavor, p.upstream, p.downstream),
), ),
) )
@@ -434,7 +434,7 @@ class VersionRangeTable {
if (q != null && q.side < 0) { if (q != null && q.side < 0) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
'<', "<",
new ExtendedVersion(cmp_flavor, q.upstream, q.downstream), new ExtendedVersion(cmp_flavor, q.upstream, q.downstream),
), ),
) )
@@ -442,7 +442,7 @@ class VersionRangeTable {
if (q != null && q.side >= 0) { if (q != null && q.side >= 0) {
term.push( term.push(
VersionRange.anchor( VersionRange.anchor(
'<=', "<=",
new ExtendedVersion(cmp_flavor, q.upstream, q.downstream), new ExtendedVersion(cmp_flavor, q.upstream, q.downstream),
), ),
) )
@@ -463,26 +463,26 @@ class VersionRangeTable {
export class VersionRange { export class VersionRange {
constructor(public atom: Anchor | And | Or | Not | P.Any | P.None | Flavor) {} constructor(public atom: Anchor | And | Or | Not | P.Any | P.None | Flavor) {}
toStringParens(parent: 'And' | 'Or' | 'Not') { toStringParens(parent: "And" | "Or" | "Not") {
let needs = true let needs = true
switch (this.atom.type) { switch (this.atom.type) {
case 'And': case "And":
case 'Or': case "Or":
needs = parent != this.atom.type needs = parent != this.atom.type
break break
case 'Anchor': case "Anchor":
case 'Any': case "Any":
case 'None': case "None":
needs = parent == 'Not' needs = parent == "Not"
break break
case 'Not': case "Not":
case 'Flavor': case "Flavor":
needs = false needs = false
break break
} }
if (needs) { if (needs) {
return '(' + this.toString() + ')' return "(" + this.toString() + ")"
} else { } else {
return this.toString() return this.toString()
} }
@@ -490,36 +490,36 @@ export class VersionRange {
toString(): string { toString(): string {
switch (this.atom.type) { switch (this.atom.type) {
case 'Anchor': case "Anchor":
return `${this.atom.operator}${this.atom.version}` return `${this.atom.operator}${this.atom.version}`
case 'And': case "And":
return `${this.atom.left.toStringParens(this.atom.type)} && ${this.atom.right.toStringParens(this.atom.type)}` return `${this.atom.left.toStringParens(this.atom.type)} && ${this.atom.right.toStringParens(this.atom.type)}`
case 'Or': case "Or":
return `${this.atom.left.toStringParens(this.atom.type)} || ${this.atom.right.toStringParens(this.atom.type)}` return `${this.atom.left.toStringParens(this.atom.type)} || ${this.atom.right.toStringParens(this.atom.type)}`
case 'Not': case "Not":
return `!${this.atom.value.toStringParens(this.atom.type)}` return `!${this.atom.value.toStringParens(this.atom.type)}`
case 'Flavor': case "Flavor":
return this.atom.flavor == null ? `#` : `#${this.atom.flavor}` return this.atom.flavor == null ? `#` : `#${this.atom.flavor}`
case 'Any': case "Any":
return '*' return "*"
case 'None': case "None":
return '!' return "!"
} }
} }
private static parseAtom(atom: P.VersionRangeAtom): VersionRange { private static parseAtom(atom: P.VersionRangeAtom): VersionRange {
switch (atom.type) { switch (atom.type) {
case 'Not': case "Not":
return new VersionRange({ return new VersionRange({
type: 'Not', type: "Not",
value: VersionRange.parseAtom(atom.value), value: VersionRange.parseAtom(atom.value),
}) })
case 'Parens': case "Parens":
return VersionRange.parseRange(atom.expr) return VersionRange.parseRange(atom.expr)
case 'Anchor': case "Anchor":
return new VersionRange({ return new VersionRange({
type: 'Anchor', type: "Anchor",
operator: atom.operator || '^', operator: atom.operator || "^",
version: new ExtendedVersion( version: new ExtendedVersion(
atom.version.flavor, atom.version.flavor,
new Version( new Version(
@@ -532,7 +532,7 @@ export class VersionRange {
), ),
), ),
}) })
case 'Flavor': case "Flavor":
return VersionRange.flavor(atom.flavor) return VersionRange.flavor(atom.flavor)
default: default:
return new VersionRange(atom) return new VersionRange(atom)
@@ -543,17 +543,17 @@ export class VersionRange {
let result = VersionRange.parseAtom(range[0]) let result = VersionRange.parseAtom(range[0])
for (const next of range[1]) { for (const next of range[1]) {
switch (next[1]?.[0]) { switch (next[1]?.[0]) {
case '||': case "||":
result = new VersionRange({ result = new VersionRange({
type: 'Or', type: "Or",
left: result, left: result,
right: VersionRange.parseAtom(next[2]), right: VersionRange.parseAtom(next[2]),
}) })
break break
case '&&': case "&&":
default: default:
result = new VersionRange({ result = new VersionRange({
type: 'And', type: "And",
left: result, left: result,
right: VersionRange.parseAtom(next[2]), right: VersionRange.parseAtom(next[2]),
}) })
@@ -565,49 +565,49 @@ export class VersionRange {
static parse(range: string): VersionRange { static parse(range: string): VersionRange {
return VersionRange.parseRange( return VersionRange.parseRange(
P.parse(range, { startRule: 'VersionRange' }), P.parse(range, { startRule: "VersionRange" }),
) )
} }
static anchor(operator: P.CmpOp, version: ExtendedVersion) { static anchor(operator: P.CmpOp, version: ExtendedVersion) {
return new VersionRange({ type: 'Anchor', operator, version }) return new VersionRange({ type: "Anchor", operator, version })
} }
static flavor(flavor: string | null) { static flavor(flavor: string | null) {
return new VersionRange({ type: 'Flavor', flavor }) return new VersionRange({ type: "Flavor", flavor })
} }
static parseEmver(range: string): VersionRange { static parseEmver(range: string): VersionRange {
return VersionRange.parseRange( return VersionRange.parseRange(
P.parse(range, { startRule: 'EmverVersionRange' }), P.parse(range, { startRule: "EmverVersionRange" }),
) )
} }
and(right: VersionRange) { and(right: VersionRange) {
return new VersionRange({ type: 'And', left: this, right }) return new VersionRange({ type: "And", left: this, right })
} }
or(right: VersionRange) { or(right: VersionRange) {
return new VersionRange({ type: 'Or', left: this, right }) return new VersionRange({ type: "Or", left: this, right })
} }
not() { not() {
return new VersionRange({ type: 'Not', value: this }) return new VersionRange({ type: "Not", value: this })
} }
static and(...xs: Array<VersionRange>) { static and(...xs: Array<VersionRange>) {
let y = VersionRange.any() let y = VersionRange.any()
for (let x of xs) { for (let x of xs) {
if (x.atom.type == 'Any') { if (x.atom.type == "Any") {
continue continue
} }
if (x.atom.type == 'None') { if (x.atom.type == "None") {
return x return x
} }
if (y.atom.type == 'Any') { if (y.atom.type == "Any") {
y = x y = x
} else { } else {
y = new VersionRange({ type: 'And', left: y, right: x }) y = new VersionRange({ type: "And", left: y, right: x })
} }
} }
return y return y
@@ -616,27 +616,27 @@ export class VersionRange {
static or(...xs: Array<VersionRange>) { static or(...xs: Array<VersionRange>) {
let y = VersionRange.none() let y = VersionRange.none()
for (let x of xs) { for (let x of xs) {
if (x.atom.type == 'None') { if (x.atom.type == "None") {
continue continue
} }
if (x.atom.type == 'Any') { if (x.atom.type == "Any") {
return x return x
} }
if (y.atom.type == 'None') { if (y.atom.type == "None") {
y = x y = x
} else { } else {
y = new VersionRange({ type: 'Or', left: y, right: x }) y = new VersionRange({ type: "Or", left: y, right: x })
} }
} }
return y return y
} }
static any() { static any() {
return new VersionRange({ type: 'Any' }) return new VersionRange({ type: "Any" })
} }
static none() { static none() {
return new VersionRange({ type: 'None' }) return new VersionRange({ type: "None" })
} }
satisfiedBy(version: Version | ExtendedVersion) { satisfiedBy(version: Version | ExtendedVersion) {
@@ -645,23 +645,23 @@ export class VersionRange {
tables(): VersionRangeTables { tables(): VersionRangeTables {
switch (this.atom.type) { switch (this.atom.type) {
case 'Anchor': case "Anchor":
switch (this.atom.operator) { switch (this.atom.operator) {
case '=': case "=":
// `=1.2.3` is equivalent to `>=1.2.3 && <=1.2.4 && #flavor` // `=1.2.3` is equivalent to `>=1.2.3 && <=1.2.4 && #flavor`
return VersionRangeTable.and( return VersionRangeTable.and(
VersionRangeTable.cmp(this.atom.version, -1, false, true), VersionRangeTable.cmp(this.atom.version, -1, false, true),
VersionRangeTable.cmp(this.atom.version, 1, true, false), VersionRangeTable.cmp(this.atom.version, 1, true, false),
) )
case '>': case ">":
return VersionRangeTable.cmp(this.atom.version, 1, false, true) return VersionRangeTable.cmp(this.atom.version, 1, false, true)
case '<': case "<":
return VersionRangeTable.cmp(this.atom.version, -1, true, false) return VersionRangeTable.cmp(this.atom.version, -1, true, false)
case '>=': case ">=":
return VersionRangeTable.cmp(this.atom.version, -1, false, true) return VersionRangeTable.cmp(this.atom.version, -1, false, true)
case '<=': case "<=":
return VersionRangeTable.cmp(this.atom.version, 1, true, false) return VersionRangeTable.cmp(this.atom.version, 1, true, false)
case '!=': case "!=":
// `!=1.2.3` is equivalent to `!(>=1.2.3 && <=1.2.3 && #flavor)` // `!=1.2.3` is equivalent to `!(>=1.2.3 && <=1.2.3 && #flavor)`
// **not** equivalent to `(<1.2.3 || >1.2.3) && #flavor` // **not** equivalent to `(<1.2.3 || >1.2.3) && #flavor`
return VersionRangeTable.not( return VersionRangeTable.not(
@@ -670,7 +670,7 @@ export class VersionRange {
VersionRangeTable.cmp(this.atom.version, 1, true, false), VersionRangeTable.cmp(this.atom.version, 1, true, false),
), ),
) )
case '^': case "^":
// `^1.2.3` is equivalent to `>=1.2.3 && <2.0.0 && #flavor` // `^1.2.3` is equivalent to `>=1.2.3 && <2.0.0 && #flavor`
return VersionRangeTable.and( return VersionRangeTable.and(
VersionRangeTable.cmp(this.atom.version, -1, false, true), VersionRangeTable.cmp(this.atom.version, -1, false, true),
@@ -681,7 +681,7 @@ export class VersionRange {
false, false,
), ),
) )
case '~': case "~":
// `~1.2.3` is equivalent to `>=1.2.3 && <1.3.0 && #flavor` // `~1.2.3` is equivalent to `>=1.2.3 && <1.3.0 && #flavor`
return VersionRangeTable.and( return VersionRangeTable.and(
VersionRangeTable.cmp(this.atom.version, -1, false, true), VersionRangeTable.cmp(this.atom.version, -1, false, true),
@@ -693,23 +693,23 @@ export class VersionRange {
), ),
) )
} }
case 'Flavor': case "Flavor":
return VersionRangeTable.eqFlavor(this.atom.flavor) return VersionRangeTable.eqFlavor(this.atom.flavor)
case 'Not': case "Not":
return VersionRangeTable.not(this.atom.value.tables()) return VersionRangeTable.not(this.atom.value.tables())
case 'And': case "And":
return VersionRangeTable.and( return VersionRangeTable.and(
this.atom.left.tables(), this.atom.left.tables(),
this.atom.right.tables(), this.atom.right.tables(),
) )
case 'Or': case "Or":
return VersionRangeTable.or( return VersionRangeTable.or(
this.atom.left.tables(), this.atom.left.tables(),
this.atom.right.tables(), this.atom.right.tables(),
) )
case 'Any': case "Any":
return true return true
case 'None': case "None":
return false return false
} }
} }
@@ -734,23 +734,23 @@ export class Version {
) {} ) {}
toString(): string { toString(): string {
return `${this.number.join('.')}${this.prerelease.length > 0 ? `-${this.prerelease.join('.')}` : ''}` return `${this.number.join(".")}${this.prerelease.length > 0 ? `-${this.prerelease.join(".")}` : ""}`
} }
compare(other: Version): 'greater' | 'equal' | 'less' { compare(other: Version): "greater" | "equal" | "less" {
const numLen = Math.max(this.number.length, other.number.length) const numLen = Math.max(this.number.length, other.number.length)
for (let i = 0; i < numLen; i++) { for (let i = 0; i < numLen; i++) {
if ((this.number[i] || 0) > (other.number[i] || 0)) { if ((this.number[i] || 0) > (other.number[i] || 0)) {
return 'greater' return "greater"
} else if ((this.number[i] || 0) < (other.number[i] || 0)) { } else if ((this.number[i] || 0) < (other.number[i] || 0)) {
return 'less' return "less"
} }
} }
if (this.prerelease.length === 0 && other.prerelease.length !== 0) { if (this.prerelease.length === 0 && other.prerelease.length !== 0) {
return 'greater' return "greater"
} else if (this.prerelease.length !== 0 && other.prerelease.length === 0) { } else if (this.prerelease.length !== 0 && other.prerelease.length === 0) {
return 'less' return "less"
} }
const prereleaseLen = Math.max( const prereleaseLen = Math.max(
@@ -760,42 +760,42 @@ export class Version {
for (let i = 0; i < prereleaseLen; i++) { for (let i = 0; i < prereleaseLen; i++) {
if (typeof this.prerelease[i] === typeof other.prerelease[i]) { if (typeof this.prerelease[i] === typeof other.prerelease[i]) {
if (this.prerelease[i] > other.prerelease[i]) { if (this.prerelease[i] > other.prerelease[i]) {
return 'greater' return "greater"
} else if (this.prerelease[i] < other.prerelease[i]) { } else if (this.prerelease[i] < other.prerelease[i]) {
return 'less' return "less"
} }
} else { } else {
switch (`${typeof this.prerelease[1]}:${typeof other.prerelease[i]}`) { switch (`${typeof this.prerelease[1]}:${typeof other.prerelease[i]}`) {
case 'number:string': case "number:string":
return 'less' return "less"
case 'string:number': case "string:number":
return 'greater' return "greater"
case 'number:undefined': case "number:undefined":
case 'string:undefined': case "string:undefined":
return 'greater' return "greater"
case 'undefined:number': case "undefined:number":
case 'undefined:string': case "undefined:string":
return 'less' return "less"
} }
} }
} }
return 'equal' return "equal"
} }
compareForSort(other: Version): -1 | 0 | 1 { compareForSort(other: Version): -1 | 0 | 1 {
switch (this.compare(other)) { switch (this.compare(other)) {
case 'greater': case "greater":
return 1 return 1
case 'equal': case "equal":
return 0 return 0
case 'less': case "less":
return -1 return -1
} }
} }
static parse(version: string): Version { static parse(version: string): Version {
const parsed = P.parse(version, { startRule: 'Version' }) const parsed = P.parse(version, { startRule: "Version" })
return new Version(parsed.number, parsed.prerelease) return new Version(parsed.number, parsed.prerelease)
} }
@@ -815,25 +815,25 @@ export class ExtendedVersion {
) {} ) {}
toString(): string { toString(): string {
return `${this.flavor ? `#${this.flavor}:` : ''}${this.upstream.toString()}:${this.downstream.toString()}` return `${this.flavor ? `#${this.flavor}:` : ""}${this.upstream.toString()}:${this.downstream.toString()}`
} }
compare(other: ExtendedVersion): 'greater' | 'equal' | 'less' | null { compare(other: ExtendedVersion): "greater" | "equal" | "less" | null {
if (this.flavor !== other.flavor) { if (this.flavor !== other.flavor) {
return null return null
} }
const upstreamCmp = this.upstream.compare(other.upstream) const upstreamCmp = this.upstream.compare(other.upstream)
if (upstreamCmp !== 'equal') { if (upstreamCmp !== "equal") {
return upstreamCmp return upstreamCmp
} }
return this.downstream.compare(other.downstream) return this.downstream.compare(other.downstream)
} }
compareLexicographic(other: ExtendedVersion): 'greater' | 'equal' | 'less' { compareLexicographic(other: ExtendedVersion): "greater" | "equal" | "less" {
if ((this.flavor || '') > (other.flavor || '')) { if ((this.flavor || "") > (other.flavor || "")) {
return 'greater' return "greater"
} else if ((this.flavor || '') > (other.flavor || '')) { } else if ((this.flavor || "") > (other.flavor || "")) {
return 'less' return "less"
} else { } else {
return this.compare(other)! return this.compare(other)!
} }
@@ -841,37 +841,37 @@ export class ExtendedVersion {
compareForSort(other: ExtendedVersion): 1 | 0 | -1 { compareForSort(other: ExtendedVersion): 1 | 0 | -1 {
switch (this.compareLexicographic(other)) { switch (this.compareLexicographic(other)) {
case 'greater': case "greater":
return 1 return 1
case 'equal': case "equal":
return 0 return 0
case 'less': case "less":
return -1 return -1
} }
} }
greaterThan(other: ExtendedVersion): boolean { greaterThan(other: ExtendedVersion): boolean {
return this.compare(other) === 'greater' return this.compare(other) === "greater"
} }
greaterThanOrEqual(other: ExtendedVersion): boolean { greaterThanOrEqual(other: ExtendedVersion): boolean {
return ['greater', 'equal'].includes(this.compare(other) as string) return ["greater", "equal"].includes(this.compare(other) as string)
} }
equals(other: ExtendedVersion): boolean { equals(other: ExtendedVersion): boolean {
return this.compare(other) === 'equal' return this.compare(other) === "equal"
} }
lessThan(other: ExtendedVersion): boolean { lessThan(other: ExtendedVersion): boolean {
return this.compare(other) === 'less' return this.compare(other) === "less"
} }
lessThanOrEqual(other: ExtendedVersion): boolean { lessThanOrEqual(other: ExtendedVersion): boolean {
return ['less', 'equal'].includes(this.compare(other) as string) return ["less", "equal"].includes(this.compare(other) as string)
} }
static parse(extendedVersion: string): ExtendedVersion { static parse(extendedVersion: string): ExtendedVersion {
const parsed = P.parse(extendedVersion, { startRule: 'ExtendedVersion' }) const parsed = P.parse(extendedVersion, { startRule: "ExtendedVersion" })
return new ExtendedVersion( return new ExtendedVersion(
parsed.flavor || null, parsed.flavor || null,
new Version(parsed.upstream.number, parsed.upstream.prerelease), new Version(parsed.upstream.number, parsed.upstream.prerelease),
@@ -881,7 +881,7 @@ export class ExtendedVersion {
static parseEmver(extendedVersion: string): ExtendedVersion { static parseEmver(extendedVersion: string): ExtendedVersion {
try { try {
const parsed = P.parse(extendedVersion, { startRule: 'Emver' }) const parsed = P.parse(extendedVersion, { startRule: "Emver" })
return new ExtendedVersion( return new ExtendedVersion(
parsed.flavor || null, parsed.flavor || null,
new Version(parsed.upstream.number, parsed.upstream.prerelease), new Version(parsed.upstream.number, parsed.upstream.prerelease),
@@ -956,22 +956,22 @@ export class ExtendedVersion {
*/ */
satisfies(versionRange: VersionRange): boolean { satisfies(versionRange: VersionRange): boolean {
switch (versionRange.atom.type) { switch (versionRange.atom.type) {
case 'Anchor': case "Anchor":
const otherVersion = versionRange.atom.version const otherVersion = versionRange.atom.version
switch (versionRange.atom.operator) { switch (versionRange.atom.operator) {
case '=': case "=":
return this.equals(otherVersion) return this.equals(otherVersion)
case '>': case ">":
return this.greaterThan(otherVersion) return this.greaterThan(otherVersion)
case '<': case "<":
return this.lessThan(otherVersion) return this.lessThan(otherVersion)
case '>=': case ">=":
return this.greaterThanOrEqual(otherVersion) return this.greaterThanOrEqual(otherVersion)
case '<=': case "<=":
return this.lessThanOrEqual(otherVersion) return this.lessThanOrEqual(otherVersion)
case '!=': case "!=":
return !this.equals(otherVersion) return !this.equals(otherVersion)
case '^': case "^":
const nextMajor = versionRange.atom.version.incrementMajor() const nextMajor = versionRange.atom.version.incrementMajor()
if ( if (
this.greaterThanOrEqual(otherVersion) && this.greaterThanOrEqual(otherVersion) &&
@@ -981,7 +981,7 @@ export class ExtendedVersion {
} else { } else {
return false return false
} }
case '~': case "~":
const nextMinor = versionRange.atom.version.incrementMinor() const nextMinor = versionRange.atom.version.incrementMinor()
if ( if (
this.greaterThanOrEqual(otherVersion) && this.greaterThanOrEqual(otherVersion) &&
@@ -992,23 +992,23 @@ export class ExtendedVersion {
return false return false
} }
} }
case 'Flavor': case "Flavor":
return versionRange.atom.flavor == this.flavor return versionRange.atom.flavor == this.flavor
case 'And': case "And":
return ( return (
this.satisfies(versionRange.atom.left) && this.satisfies(versionRange.atom.left) &&
this.satisfies(versionRange.atom.right) this.satisfies(versionRange.atom.right)
) )
case 'Or': case "Or":
return ( return (
this.satisfies(versionRange.atom.left) || this.satisfies(versionRange.atom.left) ||
this.satisfies(versionRange.atom.right) this.satisfies(versionRange.atom.right)
) )
case 'Not': case "Not":
return !this.satisfies(versionRange.atom.value) return !this.satisfies(versionRange.atom.value)
case 'Any': case "Any":
return true return true
case 'None': case "None":
return false return false
} }
} }
@@ -1020,34 +1020,34 @@ export const testTypeVersion = <T extends string>(t: T & ValidateVersion<T>) =>
t t
function tests() { function tests() {
testTypeVersion('1.2.3') testTypeVersion("1.2.3")
testTypeVersion('1') testTypeVersion("1")
testTypeVersion('12.34.56') testTypeVersion("12.34.56")
testTypeVersion('1.2-3') testTypeVersion("1.2-3")
testTypeVersion('1-3') testTypeVersion("1-3")
testTypeVersion('1-alpha') testTypeVersion("1-alpha")
// @ts-expect-error // @ts-expect-error
testTypeVersion('-3') testTypeVersion("-3")
// @ts-expect-error // @ts-expect-error
testTypeVersion('1.2.3:1') testTypeVersion("1.2.3:1")
// @ts-expect-error // @ts-expect-error
testTypeVersion('#cat:1:1') testTypeVersion("#cat:1:1")
testTypeExVer('1.2.3:1.2.3') testTypeExVer("1.2.3:1.2.3")
testTypeExVer('1.2.3.4.5.6.7.8.9.0:1') testTypeExVer("1.2.3.4.5.6.7.8.9.0:1")
testTypeExVer('100:1') testTypeExVer("100:1")
testTypeExVer('#cat:1:1') testTypeExVer("#cat:1:1")
testTypeExVer('1.2.3.4.5.6.7.8.9.11.22.33:1') testTypeExVer("1.2.3.4.5.6.7.8.9.11.22.33:1")
testTypeExVer('1-0:1') testTypeExVer("1-0:1")
testTypeExVer('1-0:1') testTypeExVer("1-0:1")
// @ts-expect-error // @ts-expect-error
testTypeExVer('1.2-3') testTypeExVer("1.2-3")
// @ts-expect-error // @ts-expect-error
testTypeExVer('1-3') testTypeExVer("1-3")
// @ts-expect-error // @ts-expect-error
testTypeExVer('1.2.3.4.5.6.7.8.9.0.10:1' as string) testTypeExVer("1.2.3.4.5.6.7.8.9.0.10:1" as string)
// @ts-expect-error // @ts-expect-error
testTypeExVer('1.-2:1') testTypeExVer("1.-2:1")
// @ts-expect-error // @ts-expect-error
testTypeExVer('1..2.3:3') testTypeExVer("1..2.3:3")
} }

View File

@@ -1,13 +1,13 @@
export { S9pk } from './s9pk' export { S9pk } from "./s9pk"
export { VersionRange, ExtendedVersion, Version } from './exver' export { VersionRange, ExtendedVersion, Version } from "./exver"
export * as inputSpec from './actions/input' export * as inputSpec from "./actions/input"
export * as ISB from './actions/input/builder' export * as ISB from "./actions/input/builder"
export * as IST from './actions/input/inputSpecTypes' export * as IST from "./actions/input/inputSpecTypes"
export * as types from './types' export * as types from "./types"
export * as T from './types' export * as T from "./types"
export * as yaml from 'yaml' export * as yaml from "yaml"
export * as inits from './inits' export * as inits from "./inits"
export * as matches from 'ts-matches' export * as matches from "ts-matches"
export * as utils from './util' export * as utils from "./util"

View File

@@ -1,2 +1,2 @@
export * from './setupInit' export * from "./setupInit"
export * from './setupUninit' export * from "./setupUninit"

View File

@@ -1,8 +1,8 @@
import { VersionRange } from '../../../base/lib/exver' import { VersionRange } from "../../../base/lib/exver"
import * as T from '../../../base/lib/types' import * as T from "../../../base/lib/types"
import { once } from '../util' import { once } from "../util"
export type InitKind = 'install' | 'update' | 'restore' | null export type InitKind = "install" | "update" | "restore" | null
export type InitFn<Kind extends InitKind = InitKind> = ( export type InitFn<Kind extends InitKind = InitKind> = (
effects: T.Effects, effects: T.Effects,
@@ -31,7 +31,7 @@ export function setupInit(...inits: InitScriptOrFn[]): T.ExpectedExports.init {
complete.then(() => fn()).catch(console.error), complete.then(() => fn()).catch(console.error),
) )
try { try {
if ('init' in init) await init.init(e, opts.kind) if ("init" in init) await init.init(e, opts.kind)
else await init(e, opts.kind) else await init(e, opts.kind)
} finally { } finally {
res() res()
@@ -43,7 +43,7 @@ export function setupInit(...inits: InitScriptOrFn[]): T.ExpectedExports.init {
} }
export function setupOnInit(onInit: InitScriptOrFn): InitScript { export function setupOnInit(onInit: InitScriptOrFn): InitScript {
return 'init' in onInit return "init" in onInit
? onInit ? onInit
: { : {
init: async (effects, kind) => { init: async (effects, kind) => {

View File

@@ -1,5 +1,5 @@
import { ExtendedVersion, VersionRange } from '../../../base/lib/exver' import { ExtendedVersion, VersionRange } from "../../../base/lib/exver"
import * as T from '../../../base/lib/types' import * as T from "../../../base/lib/types"
export type UninitFn = ( export type UninitFn = (
effects: T.Effects, effects: T.Effects,
@@ -34,14 +34,14 @@ export function setupUninit(
): T.ExpectedExports.uninit { ): T.ExpectedExports.uninit {
return async (opts) => { return async (opts) => {
for (const uninit of uninits) { for (const uninit of uninits) {
if ('uninit' in uninit) await uninit.uninit(opts.effects, opts.target) if ("uninit" in uninit) await uninit.uninit(opts.effects, opts.target)
else await uninit(opts.effects, opts.target) else await uninit(opts.effects, opts.target)
} }
} }
} }
export function setupOnUninit(onUninit: UninitScriptOrFn): UninitScript { export function setupOnUninit(onUninit: UninitScriptOrFn): UninitScript {
return 'uninit' in onUninit return "uninit" in onUninit
? onUninit ? onUninit
: { : {
uninit: async (effects, target) => { uninit: async (effects, target) => {

View File

@@ -1,10 +1,10 @@
import { object, string } from 'ts-matches' import { object, string } from "ts-matches"
import { Effects } from '../Effects' import { Effects } from "../Effects"
import { Origin } from './Origin' import { Origin } from "./Origin"
import { AddSslOptions, BindParams } from '../osBindings' import { AddSslOptions, BindParams } from "../osBindings"
import { Security } from '../osBindings' import { Security } from "../osBindings"
import { BindOptions } from '../osBindings' import { BindOptions } from "../osBindings"
import { AlpnInfo } from '../osBindings' import { AlpnInfo } from "../osBindings"
export { AddSslOptions, Security, BindOptions } export { AddSslOptions, Security, BindOptions }
@@ -12,8 +12,8 @@ export const knownProtocols = {
http: { http: {
secure: null, secure: null,
defaultPort: 80, defaultPort: 80,
withSsl: 'https', withSsl: "https",
alpn: { specified: ['http/1.1'] } as AlpnInfo, alpn: { specified: ["http/1.1"] } as AlpnInfo,
}, },
https: { https: {
secure: { ssl: true }, secure: { ssl: true },
@@ -22,8 +22,8 @@ export const knownProtocols = {
ws: { ws: {
secure: null, secure: null,
defaultPort: 80, defaultPort: 80,
withSsl: 'wss', withSsl: "wss",
alpn: { specified: ['http/1.1'] } as AlpnInfo, alpn: { specified: ["http/1.1"] } as AlpnInfo,
}, },
wss: { wss: {
secure: { ssl: true }, secure: { ssl: true },
@@ -140,8 +140,8 @@ export class MultiHost {
addXForwardedHeaders: false, addXForwardedHeaders: false,
preferredExternalPort: knownProtocols[sslProto].defaultPort, preferredExternalPort: knownProtocols[sslProto].defaultPort,
scheme: sslProto, scheme: sslProto,
alpn: 'alpn' in protoInfo ? protoInfo.alpn : null, alpn: "alpn" in protoInfo ? protoInfo.alpn : null,
...('addSsl' in options ? options.addSsl : null), ...("addSsl" in options ? options.addSsl : null),
} }
: options.addSsl : options.addSsl
? { ? {
@@ -149,7 +149,7 @@ export class MultiHost {
preferredExternalPort: 443, preferredExternalPort: 443,
scheme: sslProto, scheme: sslProto,
alpn: null, alpn: null,
...('addSsl' in options ? options.addSsl : null), ...("addSsl" in options ? options.addSsl : null),
} }
: null : null
@@ -169,8 +169,8 @@ export class MultiHost {
private getSslProto(options: BindOptionsByKnownProtocol) { private getSslProto(options: BindOptionsByKnownProtocol) {
const proto = options.protocol const proto = options.protocol
const protoInfo = knownProtocols[proto] const protoInfo = knownProtocols[proto]
if (inObject('noAddSsl', options) && options.noAddSsl) return null if (inObject("noAddSsl", options) && options.noAddSsl) return null
if ('withSsl' in protoInfo && protoInfo.withSsl) return protoInfo.withSsl if ("withSsl" in protoInfo && protoInfo.withSsl) return protoInfo.withSsl
if (protoInfo.secure?.ssl) return proto if (protoInfo.secure?.ssl) return proto
return null return null
} }

View File

@@ -1,7 +1,7 @@
import { AddressInfo } from '../types' import { AddressInfo } from "../types"
import { AddressReceipt } from './AddressReceipt' import { AddressReceipt } from "./AddressReceipt"
import { MultiHost, Scheme } from './Host' import { MultiHost, Scheme } from "./Host"
import { ServiceInterfaceBuilder } from './ServiceInterfaceBuilder' import { ServiceInterfaceBuilder } from "./ServiceInterfaceBuilder"
export class Origin { export class Origin {
constructor( constructor(
@@ -21,9 +21,9 @@ export class Origin {
.map( .map(
([key, val]) => `${encodeURIComponent(key)}=${encodeURIComponent(val)}`, ([key, val]) => `${encodeURIComponent(key)}=${encodeURIComponent(val)}`,
) )
.join('&') .join("&")
const qp = qpEntries.length ? `?${qpEntries}` : '' const qp = qpEntries.length ? `?${qpEntries}` : ""
return { return {
hostId: this.host.options.id, hostId: this.host.options.id,

View File

@@ -1,6 +1,6 @@
import { ServiceInterfaceType } from '../types' import { ServiceInterfaceType } from "../types"
import { Effects } from '../Effects' import { Effects } from "../Effects"
import { Scheme } from './Host' import { Scheme } from "./Host"
/** /**
* A helper class for creating a Network Interface * A helper class for creating a Network Interface

View File

@@ -1,6 +1,6 @@
import * as T from '../types' import * as T from "../types"
import { once } from '../util' import { once } from "../util"
import { AddressReceipt } from './AddressReceipt' import { AddressReceipt } from "./AddressReceipt"
declare const UpdateServiceInterfacesProof: unique symbol declare const UpdateServiceInterfacesProof: unique symbol
export type UpdateServiceInterfacesReceipt = { export type UpdateServiceInterfacesReceipt = {

View File

@@ -1,5 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AnyVerifyingKey } from './AnyVerifyingKey' import type { AnyVerifyingKey } from "./AnyVerifyingKey"
export type AcceptSigners = export type AcceptSigners =
| { signer: AnyVerifyingKey } | { signer: AnyVerifyingKey }

Some files were not shown because too many files have changed in this diff Show More