Merge pull request #2684 from Start9Labs/bugfix/misc

miscellaneous fixes from alpha testing
This commit is contained in:
Aiden McClelland
2024-08-06 16:53:37 -06:00
committed by GitHub
113 changed files with 9608 additions and 1064 deletions

View File

@@ -83,7 +83,7 @@ jobs:
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v3
- name: Set up system dependencies - name: Set up system dependencies
run: sudo apt-get update && sudo apt-get install -y qemu-user-static systemd-container run: sudo apt-get update && sudo apt-get install -y qemu-user-static systemd-container squashfuse
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3

View File

@@ -47,7 +47,7 @@ endif
.DELETE_ON_ERROR: .DELETE_ON_ERROR:
.PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole wormhole-deb test .PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole wormhole-deb test test-core test-sdk test-container-runtime
all: $(ALL_TARGETS) all: $(ALL_TARGETS)
@@ -89,9 +89,16 @@ clean:
format: format:
cd core && cargo +nightly fmt cd core && cargo +nightly fmt
test: $(CORE_SRC) $(ENVIRONMENT_FILE) test: | test-core test-sdk test-container-runtime
(cd core && cargo build --features=test && cargo test --features=test)
(cd sdk && make test) test-core: $(CORE_SRC) $(ENVIRONMENT_FILE)
cd core && cargo build --features=test && cargo test --features=test
test-sdk: $(shell git ls-files sdk) sdk/lib/osBindings
cd sdk && make test
test-container-runtime: container-runtime/node_modules $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
cd container-runtime && npm test
cli: cli:
cd core && ./install-cli.sh cd core && ./install-cli.sh
@@ -224,7 +231,7 @@ sdk/lib/osBindings: core/startos/bindings
core/startos/bindings: $(shell git ls-files core) $(ENVIRONMENT_FILE) core/startos/bindings: $(shell git ls-files core) $(ENVIRONMENT_FILE)
rm -rf core/startos/bindings rm -rf core/startos/bindings
(cd core/ && cargo test --features=test '::export_bindings_') (cd core/ && cargo test --features=test 'export_bindings_')
touch core/startos/bindings touch core/startos/bindings
sdk/dist: $(shell git ls-files sdk) sdk/lib/osBindings sdk/dist: $(shell git ls-files sdk) sdk/lib/osBindings

View File

@@ -14,14 +14,8 @@ if ! id kiosk; then
useradd -s /bin/bash --create-home kiosk useradd -s /bin/bash --create-home kiosk
fi fi
# create kiosk script mkdir /home/kiosk/fx-profile
cat > /home/kiosk/kiosk.sh << 'EOF' cat >> /home/kiosk/fx-profile/prefs.js << EOF
#!/bin/sh
PROFILE=$(mktemp -d)
if [ -f /usr/local/share/ca-certificates/startos-root-ca.crt ]; then
certutil -A -n "StartOS Local Root CA" -t "TCu,Cuw,Tuw" -i /usr/local/share/ca-certificates/startos-root-ca.crt -d $PROFILE
fi
cat >> $PROFILE/prefs.js << EOT
user_pref("app.normandy.api_url", ""); user_pref("app.normandy.api_url", "");
user_pref("app.normandy.enabled", false); user_pref("app.normandy.enabled", false);
user_pref("app.shield.optoutstudies.enabled", false); user_pref("app.shield.optoutstudies.enabled", false);
@@ -87,7 +81,11 @@ user_pref("toolkit.telemetry.shutdownPingSender.enabled", false);
user_pref("toolkit.telemetry.unified", false); user_pref("toolkit.telemetry.unified", false);
user_pref("toolkit.telemetry.updatePing.enabled", false); user_pref("toolkit.telemetry.updatePing.enabled", false);
user_pref("toolkit.telemetry.cachedClientID", ""); user_pref("toolkit.telemetry.cachedClientID", "");
EOT EOF
# create kiosk script
cat > /home/kiosk/kiosk.sh << 'EOF'
#!/bin/sh
while ! curl "http://localhost" > /dev/null; do while ! curl "http://localhost" > /dev/null; do
sleep 1 sleep 1
done done
@@ -101,8 +99,7 @@ done
killall firefox-esr killall firefox-esr
) & ) &
matchbox-window-manager -use_titlebar no & matchbox-window-manager -use_titlebar no &
firefox-esr http://localhost --profile $PROFILE firefox-esr http://localhost --profile /home/kiosk/fx-profile
rm -rf $PROFILE
EOF EOF
chmod +x /home/kiosk/kiosk.sh chmod +x /home/kiosk/kiosk.sh
@@ -116,6 +113,8 @@ fi
EOF EOF
fi fi
chown -R kiosk:kiosk /home/kiosk
# enable autologin # enable autologin
mkdir -p /etc/systemd/system/getty@tty1.service.d mkdir -p /etc/systemd/system/getty@tty1.service.d
cat > /etc/systemd/system/getty@tty1.service.d/autologin.conf << 'EOF' cat > /etc/systemd/system/getty@tty1.service.d/autologin.conf << 'EOF'

View File

@@ -16,4 +16,8 @@ elif [ "$_ARCH" = "aarch64" ]; then
_ARCH=arm64 _ARCH=arm64
fi fi
curl https://images.linuxcontainers.org/$(curl --silent https://images.linuxcontainers.org/meta/1.0/index-system | grep "^$DISTRO;$VERSION;$_ARCH;$FLAVOR;" | head -n1 | sed 's/^.*;//g')/rootfs.squashfs --output debian.${ARCH}.squashfs URL="https://images.linuxcontainers.org/$(curl -fsSL https://images.linuxcontainers.org/meta/1.0/index-system | grep "^$DISTRO;$VERSION;$_ARCH;$FLAVOR;" | head -n1 | sed 's/^.*;//g')/rootfs.squashfs"
echo "Downloading $URL to debian.${ARCH}.squashfs"
curl -fsSL "$URL" > debian.${ARCH}.squashfs

View File

@@ -0,0 +1,8 @@
/** @type {import('ts-jest').JestConfigWithTsJest} */
// Jest configuration for the TypeScript test suite.
module.exports = {
  preset: "ts-jest", // transpile .ts test files through ts-jest
  automock: false, // modules are real unless a test mocks them explicitly
  testEnvironment: "node",
  rootDir: "./src/", // discover tests under ./src/ only
  modulePathIgnorePatterns: ["./dist/"], // never resolve modules from compiled output
}

File diff suppressed because it is too large Load Diff

View File

@@ -6,7 +6,8 @@
"scripts": { "scripts": {
"check": "tsc --noEmit", "check": "tsc --noEmit",
"build": "prettier . '!tmp/**' --write && rm -rf dist && tsc", "build": "prettier . '!tmp/**' --write && rm -rf dist && tsc",
"tsc": "rm -rf dist; tsc" "tsc": "rm -rf dist; tsc",
"test": "jest -c ./jest.config.js"
}, },
"author": "", "author": "",
"prettier": { "prettier": {
@@ -17,12 +18,13 @@
}, },
"dependencies": { "dependencies": {
"@iarna/toml": "^2.2.5", "@iarna/toml": "^2.2.5",
"@start9labs/start-sdk": "file:../sdk/dist",
"@noble/hashes": "^1.4.0",
"@noble/curves": "^1.4.0", "@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0",
"@start9labs/start-sdk": "file:../sdk/dist",
"esbuild-plugin-resolve": "^2.0.0", "esbuild-plugin-resolve": "^2.0.0",
"filebrowser": "^1.0.0", "filebrowser": "^1.0.0",
"isomorphic-fetch": "^3.0.0", "isomorphic-fetch": "^3.0.0",
"jsonpath": "^1.1.1",
"lodash.merge": "^4.6.2", "lodash.merge": "^4.6.2",
"node-fetch": "^3.1.0", "node-fetch": "^3.1.0",
"ts-matches": "^5.5.1", "ts-matches": "^5.5.1",
@@ -33,8 +35,12 @@
"devDependencies": { "devDependencies": {
"@swc/cli": "^0.1.62", "@swc/cli": "^0.1.62",
"@swc/core": "^1.3.65", "@swc/core": "^1.3.65",
"@types/jest": "^29.5.12",
"@types/jsonpath": "^0.2.4",
"@types/node": "^20.11.13", "@types/node": "^20.11.13",
"jest": "^29.7.0",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"ts-jest": "^29.2.3",
"typescript": ">5.2" "typescript": ">5.2"
} }
} }

View File

@@ -64,7 +64,7 @@ const runType = object({
input: any, input: any,
timeout: number, timeout: number,
}, },
["timeout"], ["timeout", "input"],
), ),
}) })
const sandboxRunType = object({ const sandboxRunType = object({
@@ -77,7 +77,7 @@ const sandboxRunType = object({
input: any, input: any,
timeout: number, timeout: number,
}, },
["timeout"], ["timeout", "input"],
), ),
}) })
const callbackType = object({ const callbackType = object({

View File

@@ -68,15 +68,17 @@ export class DockerProcedureContainer {
key, key,
) )
} else if (volumeMount.type === "pointer") { } else if (volumeMount.type === "pointer") {
await effects.mount({ await effects
location: path, .mount({
target: { location: path,
packageId: volumeMount["package-id"], target: {
subpath: volumeMount.path, packageId: volumeMount["package-id"],
readonly: volumeMount.readonly, subpath: volumeMount.path,
volumeId: volumeMount["volume-id"], readonly: volumeMount.readonly,
}, volumeId: volumeMount["volume-id"],
}) },
})
.catch(console.warn)
} else if (volumeMount.type === "backup") { } else if (volumeMount.type === "backup") {
await overlay.mount({ type: "backup", subpath: null }, mounts[mount]) await overlay.mount({ type: "backup", subpath: null }, mounts[mount])
} }

View File

@@ -0,0 +1,387 @@
// Legacy (pre-0.3.6) config spec for the `bitcoind` package.
// Used as a test fixture input for config-spec transformation; values must
// stay in sync with the expected-output snapshot.
export default {
  // Pointer entries resolve to values owned by the OS, not the user.
  "peer-tor-address": {
    name: "Peer Tor Address",
    description: "The Tor address of the peer interface",
    type: "pointer",
    subtype: "package",
    "package-id": "bitcoind",
    target: "tor-address",
    interface: "peer",
  },
  "rpc-tor-address": {
    name: "RPC Tor Address",
    description: "The Tor address of the RPC interface",
    type: "pointer",
    subtype: "package",
    "package-id": "bitcoind",
    target: "tor-address",
    interface: "rpc",
  },
  // JSON-RPC server settings.
  rpc: {
    type: "object",
    name: "RPC Settings",
    description: "RPC configuration options.",
    spec: {
      enable: {
        type: "boolean",
        name: "Enable",
        description: "Allow remote RPC requests.",
        default: true,
      },
      username: {
        type: "string",
        nullable: false,
        name: "Username",
        description: "The username for connecting to Bitcoin over RPC.",
        warning:
          "You will need to restart all services that depend on Bitcoin.",
        default: "bitcoin",
        masked: true,
        pattern: "^[a-zA-Z0-9_]+$",
        "pattern-description": "Must be alphanumeric (can contain underscore).",
      },
      password: {
        type: "string",
        nullable: false,
        name: "RPC Password",
        description: "The password for connecting to Bitcoin over RPC.",
        warning:
          "You will need to restart all services that depend on Bitcoin.",
        // Object-valued default: a random password generated from this charset.
        default: {
          charset: "a-z,2-7",
          len: 20,
        },
        pattern: "^[a-zA-Z0-9_]+$",
        "pattern-description": "Must be alphanumeric (can contain underscore).",
        copyable: true,
        masked: true,
      },
      advanced: {
        type: "object",
        name: "Advanced",
        description: "Advanced RPC Settings",
        spec: {
          auth: {
            name: "Authorization",
            description:
              "Username and hashed password for JSON-RPC connections. RPC clients connect using the usual http basic authentication.",
            type: "list",
            subtype: "string",
            default: [],
            spec: {
              pattern: "^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$",
              "pattern-description":
                'Each item must be of the form "<USERNAME>:<SALT>$<HASH>".',
            },
            range: "[0,*)",
          },
          servertimeout: {
            name: "Rpc Server Timeout",
            description:
              "Number of seconds after which an uncompleted RPC call will time out.",
            type: "number",
            nullable: false,
            range: "[5,300]",
            integral: true,
            units: "seconds",
            default: 30,
          },
          threads: {
            name: "Threads",
            description:
              "Set the number of threads for handling RPC calls. You may wish to increase this if you are making lots of calls via an integration.",
            type: "number",
            nullable: false,
            default: 16,
            range: "[1,64]",
            integral: true,
            units: undefined, // deliberately unitless; exercises the undefined-units path
          },
          workqueue: {
            name: "Work Queue",
            description:
              "Set the depth of the work queue to service RPC calls. Determines how long the backlog of RPC requests can get before it just rejects new ones.",
            type: "number",
            nullable: false,
            default: 128,
            range: "[8,256]",
            integral: true,
            units: "requests",
          },
        },
      },
    },
  },
  "zmq-enabled": {
    type: "boolean",
    name: "ZeroMQ Enabled",
    description:
      "The ZeroMQ interface is useful for some applications which might require data related to block and transaction events from Bitcoin Core. For example, LND requires ZeroMQ be enabled for LND to get the latest block data",
    default: true,
  },
  txindex: {
    type: "boolean",
    name: "Transaction Index",
    description:
      "By enabling Transaction Index (txindex) Bitcoin Core will build a complete transaction index. This allows Bitcoin Core to access any transaction with commands like `gettransaction`.",
    default: true,
  },
  coinstatsindex: {
    type: "boolean",
    name: "Coinstats Index",
    description:
      "Enabling Coinstats Index reduces the time for the gettxoutsetinfo RPC to complete at the cost of using additional disk space",
    default: false,
  },
  // On-node wallet settings.
  wallet: {
    type: "object",
    name: "Wallet",
    description: "Wallet Settings",
    spec: {
      enable: {
        name: "Enable Wallet",
        description: "Load the wallet and enable wallet RPC calls.",
        type: "boolean",
        default: true,
      },
      avoidpartialspends: {
        name: "Avoid Partial Spends",
        description:
          "Group outputs by address, selecting all or none, instead of selecting on a per-output basis. This improves privacy at the expense of higher transaction fees.",
        type: "boolean",
        default: true,
      },
      discardfee: {
        name: "Discard Change Tolerance",
        description:
          "The fee rate (in BTC/kB) that indicates your tolerance for discarding change by adding it to the fee.",
        type: "number",
        nullable: false,
        default: 0.0001,
        range: "[0,.01]",
        integral: false,
        units: "BTC/kB",
      },
    },
  },
  // Advanced node settings: mempool policy, peering, pruning, caching, filters.
  advanced: {
    type: "object",
    name: "Advanced",
    description: "Advanced Settings",
    spec: {
      mempool: {
        type: "object",
        name: "Mempool",
        description: "Mempool Settings",
        spec: {
          persistmempool: {
            type: "boolean",
            name: "Persist Mempool",
            description: "Save the mempool on shutdown and load on restart.",
            default: true,
          },
          maxmempool: {
            type: "number",
            nullable: false,
            name: "Max Mempool Size",
            description:
              "Keep the transaction memory pool below <n> megabytes.",
            range: "[1,*)",
            integral: true,
            units: "MiB",
            default: 300,
          },
          mempoolexpiry: {
            type: "number",
            nullable: false,
            name: "Mempool Expiration",
            description:
              "Do not keep transactions in the mempool longer than <n> hours.",
            range: "[1,*)",
            integral: true,
            units: "Hr",
            default: 336,
          },
          mempoolfullrbf: {
            name: "Enable Full RBF",
            description:
              "Policy for your node to use for relaying and mining unconfirmed transactions. For details, see https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-24.0.1.md#notice-of-new-option-for-transaction-replacement-policies",
            type: "boolean",
            default: true,
          },
          permitbaremultisig: {
            type: "boolean",
            name: "Permit Bare Multisig",
            description: "Relay non-P2SH multisig transactions",
            default: true,
          },
          datacarrier: {
            type: "boolean",
            name: "Relay OP_RETURN Transactions",
            description: "Relay transactions with OP_RETURN outputs",
            default: true,
          },
          datacarriersize: {
            type: "number",
            nullable: false,
            name: "Max OP_RETURN Size",
            description: "Maximum size of data in OP_RETURN outputs to relay",
            range: "[0,10000]",
            integral: true,
            units: "bytes",
            default: 83,
          },
        },
      },
      peers: {
        type: "object",
        name: "Peers",
        description: "Peer Connection Settings",
        spec: {
          listen: {
            type: "boolean",
            name: "Make Public",
            description:
              "Allow other nodes to find your server on the network.",
            default: true,
          },
          onlyconnect: {
            type: "boolean",
            name: "Disable Peer Discovery",
            description: "Only connect to specified peers.",
            default: false,
          },
          onlyonion: {
            type: "boolean",
            name: "Disable Clearnet",
            description: "Only connect to peers over Tor.",
            default: false,
          },
          v2transport: {
            type: "boolean",
            name: "Use V2 P2P Transport Protocol",
            description:
              "Enable or disable the use of BIP324 V2 P2P transport protocol.",
            default: false,
          },
          addnode: {
            name: "Add Nodes",
            description: "Add addresses of nodes to connect to.",
            type: "list",
            subtype: "object",
            range: "[0,*)",
            default: [],
            // list-of-object specs nest an inner `spec` describing each entry
            spec: {
              spec: {
                hostname: {
                  type: "string",
                  nullable: false,
                  name: "Hostname",
                  description: "Domain or IP address of bitcoin peer",
                  // Matches IPv4, IPv6, v2 .onion, or a DNS hostname.
                  pattern:
                    "(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))",
                  "pattern-description":
                    "Must be either a domain name, or an IPv4 or IPv6 address. Do not include protocol scheme (eg 'http://') or port.",
                },
                port: {
                  type: "number",
                  nullable: true,
                  name: "Port",
                  description:
                    "Port that peer is listening on for inbound p2p connections",
                  range: "[0,65535]",
                  integral: true,
                },
              },
            },
          },
        },
      },
      pruning: {
        type: "union",
        name: "Pruning Settings",
        description:
          "Blockchain Pruning Options\nReduce the blockchain size on disk\n",
        warning:
          "Disabling pruning will convert your node into a full archival node. This requires a resync of the entire blockchain, a process that may take several days.\n",
        tag: {
          id: "mode",
          name: "Pruning Mode",
          description:
            "- Disabled: Disable pruning\n- Automatic: Limit blockchain size on disk to a certain number of megabytes\n",
          "variant-names": {
            disabled: "Disabled",
            automatic: "Automatic",
          },
        },
        variants: {
          disabled: {},
          automatic: {
            size: {
              type: "number",
              nullable: false,
              name: "Max Chain Size",
              description: "Limit of blockchain size on disk.",
              warning:
                "Increasing this value will require re-syncing your node.",
              default: 550,
              range: "[550,1000000)",
              integral: true,
              units: "MiB",
            },
          },
        },
        default: "disabled",
      },
      dbcache: {
        type: "number",
        nullable: true, // nullable + no default: blank means "use bitcoind's default"
        name: "Database Cache",
        description:
          "How much RAM to allocate for caching the TXO set. Higher values improve syncing performance, but increase your chance of using up all your system's memory or corrupting your database in the event of an ungraceful shutdown. Set this high but comfortably below your system's total RAM during IBD, then turn down to 450 (or leave blank) once the sync completes.",
        warning:
          "WARNING: Increasing this value results in a higher chance of ungraceful shutdowns, which can leave your node unusable if it happens during the initial block download. Use this setting with caution. Be sure to set this back to the default (450 or leave blank) once your node is synced. DO NOT press the STOP button if your dbcache is large. Instead, set this number back to the default, hit save, and wait for bitcoind to restart on its own.",
        range: "(0,*)",
        integral: true,
        units: "MiB",
      },
      blockfilters: {
        type: "object",
        name: "Block Filters",
        description: "Settings for storing and serving compact block filters",
        spec: {
          blockfilterindex: {
            type: "boolean",
            name: "Compute Compact Block Filters (BIP158)",
            description:
              "Generate Compact Block Filters during initial sync (IBD) to enable 'getblockfilter' RPC. This is useful if dependent services need block filters to efficiently scan for addresses/transactions etc.",
            default: true,
          },
          peerblockfilters: {
            type: "boolean",
            name: "Serve Compact Block Filters to Peers (BIP157)",
            description:
              "Serve Compact Block Filters as a peer service to other nodes on the network. This is useful if you wish to connect an SPV client to your node to make it efficient to scan transactions without having to download all block data. 'Compute Compact Block Filters (BIP158)' is required.",
            default: false,
          },
        },
      },
      bloomfilters: {
        type: "object",
        name: "Bloom Filters (BIP37)",
        description: "Setting for serving Bloom Filters",
        spec: {
          peerbloomfilters: {
            type: "boolean",
            name: "Serve Bloom Filters to Peers",
            description:
              "Peers have the option of setting filters on each connection they make after the version handshake has completed. Bloom filters are for clients implementing SPV (Simplified Payment Verification) that want to check that block headers connect together correctly, without needing to verify the full blockchain. The client must trust that the transactions in the chain are in fact valid. It is highly recommended AGAINST using for anything except Bisq integration.",
            warning:
              "This is ONLY for use with Bisq integration, please use Block Filters for all other applications.",
            default: false,
          },
        },
      },
    },
  },
}

View File

@@ -0,0 +1,127 @@
// Legacy (pre-0.3.6) config spec for the Start9 Pages package.
// Used as a test fixture input for config-spec transformation.
export default {
  // Union: what to show at the root .onion address.
  homepage: {
    name: "Homepage",
    description:
      "The page that will be displayed when your Start9 Pages .onion address is visited. Since this page is technically publicly accessible, you can choose to which type of page to display.",
    type: "union",
    default: "welcome",
    tag: {
      id: "type",
      name: "Type",
      "variant-names": {
        welcome: "Welcome",
        index: "Table of Contents",
        "web-page": "Web Page",
        redirect: "Redirect",
      },
    },
    variants: {
      // `welcome` and `index` take no further configuration.
      welcome: {},
      index: {},
      "web-page": {
        source: {
          name: "Folder Location",
          description: "The service that contains your website files.",
          type: "enum",
          values: ["filebrowser", "nextcloud"],
          "value-names": {},
          default: "nextcloud",
        },
        folder: {
          type: "string",
          name: "Folder Path",
          placeholder: "e.g. websites/resume",
          description:
            'The path to the folder that contains the static files of your website. For example, a value of "projects/resume" would tell Start9 Pages to look for that folder path in the selected service.',
          // Relative file path: path segments of allowed characters, optional trailing slash.
          pattern:
            "^(\\.|[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)(/[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|/([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)*/?$",
          "pattern-description": "Must be a valid relative file path",
          nullable: false,
        },
      },
      redirect: {
        target: {
          type: "string",
          name: "Target Subdomain",
          description:
            "The name of the subdomain to redirect users to. This must be a valid subdomain site within your Start9 Pages.",
          pattern: "^[a-z-]+$",
          "pattern-description":
            "May contain only lowercase characters and hyphens.",
          nullable: false,
        },
      },
    },
  },
  // List of additional sites, each served on its own subdomain.
  subdomains: {
    type: "list",
    name: "Subdomains",
    description: "The websites you want to serve.",
    default: [],
    range: "[0, *)",
    subtype: "object",
    spec: {
      "unique-by": "name",
      "display-as": "{{name}}",
      spec: {
        name: {
          type: "string",
          nullable: false,
          name: "Subdomain name",
          description:
            'The subdomain of your Start9 Pages .onion address to host the website on. For example, a value of "me" would produce a website hosted at http://me.xxxxxx.onion.',
          pattern: "^[a-z-]+$",
          "pattern-description":
            "May contain only lowercase characters and hyphens",
        },
        settings: {
          type: "union",
          name: "Settings",
          description:
            "The desired behavior you want to occur when the subdomain is visited. You can either redirect to another subdomain, or load a stored web page.",
          default: "web-page",
          tag: {
            id: "type",
            name: "Type",
            "variant-names": { "web-page": "Web Page", redirect: "Redirect" },
          },
          variants: {
            "web-page": {
              source: {
                name: "Folder Location",
                description: "The service that contains your website files.",
                type: "enum",
                values: ["filebrowser", "nextcloud"],
                "value-names": {},
                default: "nextcloud",
              },
              folder: {
                type: "string",
                name: "Folder Path",
                placeholder: "e.g. websites/resume",
                description:
                  'The path to the folder that contains the website files. For example, a value of "projects/resume" would tell Start9 Pages to look for that folder path in the selected service.',
                pattern:
                  "^(\\.|[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)(/[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|/([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)*/?$",
                "pattern-description": "Must be a valid relative file path",
                nullable: false,
              },
            },
            redirect: {
              target: {
                type: "string",
                name: "Target Subdomain",
                description:
                  "The subdomain of your Start9 Pages .onion address to redirect to. This should be the name of another subdomain on Start9 Pages. Leave empty to redirect to the homepage.",
                pattern: "^[a-z-]+$",
                "pattern-description":
                  "May contain only lowercase characters and hyphens.",
                nullable: false,
              },
            },
          },
        },
      },
    },
  },
}

View File

@@ -0,0 +1,28 @@
// Legacy (pre-0.3.6) config spec for the `nostr-wallet-connect` package.
// Used as a test fixture input for config-spec transformation.
export default {
  // Pointer entries resolve to OS-provided interface addresses.
  "tor-address": {
    name: "Tor Address",
    description: "The Tor address of the network interface",
    type: "pointer",
    subtype: "package",
    "package-id": "nostr-wallet-connect",
    target: "tor-address",
    interface: "main",
  },
  "lan-address": {
    name: "LAN Address",
    description: "The LAN address of the network interface",
    type: "pointer",
    subtype: "package",
    "package-id": "nostr-wallet-connect",
    target: "lan-address",
    interface: "main",
  },
  // The only user-editable value in this spec.
  "nostr-relay": {
    type: "string",
    name: "Nostr Relay",
    default: "wss://relay.getalby.com/v1",
    description: "The Nostr Relay to use for Nostr Wallet Connect connections",
    copyable: true,
    nullable: false,
  },
}

View File

@@ -0,0 +1,39 @@
// Legacy (pre-0.3.6) config spec for the SearXNG package.
// Used as a test fixture input for config-spec transformation.
export default {
  "instance-name": {
    type: "string",
    name: "SearXNG Instance Name",
    description:
      "Enter a name for your SearXNG instance. This is the name that will be listed if you want to share your SearXNG engine publicly.",
    nullable: false,
    default: "My SearXNG Engine",
    placeholder: "Uncle Jim SearXNG Engine",
  },
  "tor-url": {
    name: "Enable Tor address as the base URL",
    description:
      "Activates the utilization of a .onion address as the primary URL, particularly beneficial for publicly hosted instances over the Tor network.",
    type: "boolean",
    default: false,
  },
  "enable-metrics": {
    name: "Enable Stats",
    description:
      "Your SearXNG instance will collect anonymous stats about its own usage and performance. You can view these metrics by appending `/stats` or `/stats/errors` to your SearXNG URL.",
    type: "boolean",
    default: true,
  },
  // The options below were carried over commented-out from the upstream spec
  // and are not currently exposed:
  // "email-address": {
  //   "type": "string",
  //   "name": "Email Address",
  //   "description": "Your Email address - required to create an SSL certificate.",
  //   "nullable": false,
  //   "default": "youremail@domain.com",
  // },
  // "public-host": {
  //   "type": "string",
  //   "name": "Public Domain Name",
  //   "description": "Enter a domain name here if you want to share your SearXNG engine publicly. You will also need to modify your domain name's DNS settings to point to your Start9 server.",
  //   "nullable": true,
  //   "placeholder": "https://search.mydomain.com"
  // }
}

View File

@@ -0,0 +1,791 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`transformConfigSpec transformConfigSpec(bitcoind) 1`] = `
{
"advanced": {
"description": "Advanced Settings",
"name": "Advanced",
"spec": {
"blockfilters": {
"description": "Settings for storing and serving compact block filters",
"name": "Block Filters",
"spec": {
"blockfilterindex": {
"default": true,
"description": "Generate Compact Block Filters during initial sync (IBD) to enable 'getblockfilter' RPC. This is useful if dependent services need block filters to efficiently scan for addresses/transactions etc.",
"disabled": false,
"immutable": false,
"name": "Compute Compact Block Filters (BIP158)",
"type": "toggle",
"warning": null,
},
"peerblockfilters": {
"default": false,
"description": "Serve Compact Block Filters as a peer service to other nodes on the network. This is useful if you wish to connect an SPV client to your node to make it efficient to scan transactions without having to download all block data. 'Compute Compact Block Filters (BIP158)' is required.",
"disabled": false,
"immutable": false,
"name": "Serve Compact Block Filters to Peers (BIP157)",
"type": "toggle",
"warning": null,
},
},
"type": "object",
"warning": null,
},
"bloomfilters": {
"description": "Setting for serving Bloom Filters",
"name": "Bloom Filters (BIP37)",
"spec": {
"peerbloomfilters": {
"default": false,
"description": "Peers have the option of setting filters on each connection they make after the version handshake has completed. Bloom filters are for clients implementing SPV (Simplified Payment Verification) that want to check that block headers connect together correctly, without needing to verify the full blockchain. The client must trust that the transactions in the chain are in fact valid. It is highly recommended AGAINST using for anything except Bisq integration.",
"disabled": false,
"immutable": false,
"name": "Serve Bloom Filters to Peers",
"type": "toggle",
"warning": "This is ONLY for use with Bisq integration, please use Block Filters for all other applications.",
},
},
"type": "object",
"warning": null,
},
"dbcache": {
"default": null,
"description": "How much RAM to allocate for caching the TXO set. Higher values improve syncing performance, but increase your chance of using up all your system's memory or corrupting your database in the event of an ungraceful shutdown. Set this high but comfortably below your system's total RAM during IBD, then turn down to 450 (or leave blank) once the sync completes.",
"disabled": false,
"immutable": false,
"integer": true,
"max": null,
"min": null,
"name": "Database Cache",
"placeholder": null,
"required": false,
"step": null,
"type": "number",
"units": "MiB",
"warning": "WARNING: Increasing this value results in a higher chance of ungraceful shutdowns, which can leave your node unusable if it happens during the initial block download. Use this setting with caution. Be sure to set this back to the default (450 or leave blank) once your node is synced. DO NOT press the STOP button if your dbcache is large. Instead, set this number back to the default, hit save, and wait for bitcoind to restart on its own.",
},
"mempool": {
"description": "Mempool Settings",
"name": "Mempool",
"spec": {
"datacarrier": {
"default": true,
"description": "Relay transactions with OP_RETURN outputs",
"disabled": false,
"immutable": false,
"name": "Relay OP_RETURN Transactions",
"type": "toggle",
"warning": null,
},
"datacarriersize": {
"default": 83,
"description": "Maximum size of data in OP_RETURN outputs to relay",
"disabled": false,
"immutable": false,
"integer": true,
"max": 10000,
"min": null,
"name": "Max OP_RETURN Size",
"placeholder": null,
"required": true,
"step": null,
"type": "number",
"units": "bytes",
"warning": null,
},
"maxmempool": {
"default": 300,
"description": "Keep the transaction memory pool below <n> megabytes.",
"disabled": false,
"immutable": false,
"integer": true,
"max": null,
"min": 1,
"name": "Max Mempool Size",
"placeholder": null,
"required": true,
"step": null,
"type": "number",
"units": "MiB",
"warning": null,
},
"mempoolexpiry": {
"default": 336,
"description": "Do not keep transactions in the mempool longer than <n> hours.",
"disabled": false,
"immutable": false,
"integer": true,
"max": null,
"min": 1,
"name": "Mempool Expiration",
"placeholder": null,
"required": true,
"step": null,
"type": "number",
"units": "Hr",
"warning": null,
},
"mempoolfullrbf": {
"default": true,
"description": "Policy for your node to use for relaying and mining unconfirmed transactions. For details, see https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-24.0.1.md#notice-of-new-option-for-transaction-replacement-policies",
"disabled": false,
"immutable": false,
"name": "Enable Full RBF",
"type": "toggle",
"warning": null,
},
"permitbaremultisig": {
"default": true,
"description": "Relay non-P2SH multisig transactions",
"disabled": false,
"immutable": false,
"name": "Permit Bare Multisig",
"type": "toggle",
"warning": null,
},
"persistmempool": {
"default": true,
"description": "Save the mempool on shutdown and load on restart.",
"disabled": false,
"immutable": false,
"name": "Persist Mempool",
"type": "toggle",
"warning": null,
},
},
"type": "object",
"warning": null,
},
"peers": {
"description": "Peer Connection Settings",
"name": "Peers",
"spec": {
"addnode": {
"default": [],
"description": "Add addresses of nodes to connect to.",
"disabled": false,
"maxLength": null,
"minLength": null,
"name": "Add Nodes",
"spec": {
"displayAs": null,
"spec": {
"hostname": {
"default": null,
"description": "Domain or IP address of bitcoin peer",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"name": "Hostname",
"patterns": [
{
"description": "Must be either a domain name, or an IPv4 or IPv6 address. Do not include protocol scheme (eg 'http://') or port.",
"regex": "(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))",
},
],
"placeholder": null,
"required": true,
"type": "text",
"warning": null,
},
"port": {
"default": null,
"description": "Port that peer is listening on for inbound p2p connections",
"disabled": false,
"immutable": false,
"integer": true,
"max": 65535,
"min": null,
"name": "Port",
"placeholder": null,
"required": false,
"step": null,
"type": "number",
"units": null,
"warning": null,
},
},
"type": "object",
"uniqueBy": null,
},
"type": "list",
"warning": null,
},
"listen": {
"default": true,
"description": "Allow other nodes to find your server on the network.",
"disabled": false,
"immutable": false,
"name": "Make Public",
"type": "toggle",
"warning": null,
},
"onlyconnect": {
"default": false,
"description": "Only connect to specified peers.",
"disabled": false,
"immutable": false,
"name": "Disable Peer Discovery",
"type": "toggle",
"warning": null,
},
"onlyonion": {
"default": false,
"description": "Only connect to peers over Tor.",
"disabled": false,
"immutable": false,
"name": "Disable Clearnet",
"type": "toggle",
"warning": null,
},
"v2transport": {
"default": false,
"description": "Enable or disable the use of BIP324 V2 P2P transport protocol.",
"disabled": false,
"immutable": false,
"name": "Use V2 P2P Transport Protocol",
"type": "toggle",
"warning": null,
},
},
"type": "object",
"warning": null,
},
"pruning": {
"default": "disabled",
"description": "- Disabled: Disable pruning
- Automatic: Limit blockchain size on disk to a certain number of megabytes
",
"disabled": false,
"immutable": false,
"name": "Pruning Mode",
"required": true,
"type": "union",
"variants": {
"automatic": {
"name": "Automatic",
"spec": {
"size": {
"default": 550,
"description": "Limit of blockchain size on disk.",
"disabled": false,
"immutable": false,
"integer": true,
"max": 999999,
"min": 550,
"name": "Max Chain Size",
"placeholder": null,
"required": true,
"step": null,
"type": "number",
"units": "MiB",
"warning": "Increasing this value will require re-syncing your node.",
},
},
},
"disabled": {
"name": "Disabled",
"spec": {},
},
},
"warning": null,
},
},
"type": "object",
"warning": null,
},
"coinstatsindex": {
"default": false,
"description": "Enabling Coinstats Index reduces the time for the gettxoutsetinfo RPC to complete at the cost of using additional disk space",
"disabled": false,
"immutable": false,
"name": "Coinstats Index",
"type": "toggle",
"warning": null,
},
"rpc": {
"description": "RPC configuration options.",
"name": "RPC Settings",
"spec": {
"advanced": {
"description": "Advanced RPC Settings",
"name": "Advanced",
"spec": {
"auth": {
"default": [],
"description": "Username and hashed password for JSON-RPC connections. RPC clients connect using the usual http basic authentication.",
"disabled": false,
"maxLength": null,
"minLength": null,
"name": "Authorization",
"spec": {
"generate": null,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"patterns": [
{
"description": "Each item must be of the form "<USERNAME>:<SALT>$<HASH>".",
"regex": "^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$",
},
],
"placeholder": null,
"type": "text",
},
"type": "list",
"warning": null,
},
"servertimeout": {
"default": 30,
"description": "Number of seconds after which an uncompleted RPC call will time out.",
"disabled": false,
"immutable": false,
"integer": true,
"max": 300,
"min": 5,
"name": "Rpc Server Timeout",
"placeholder": null,
"required": true,
"step": null,
"type": "number",
"units": "seconds",
"warning": null,
},
"threads": {
"default": 16,
"description": "Set the number of threads for handling RPC calls. You may wish to increase this if you are making lots of calls via an integration.",
"disabled": false,
"immutable": false,
"integer": true,
"max": 64,
"min": 1,
"name": "Threads",
"placeholder": null,
"required": true,
"step": null,
"type": "number",
"units": null,
"warning": null,
},
"workqueue": {
"default": 128,
"description": "Set the depth of the work queue to service RPC calls. Determines how long the backlog of RPC requests can get before it just rejects new ones.",
"disabled": false,
"immutable": false,
"integer": true,
"max": 256,
"min": 8,
"name": "Work Queue",
"placeholder": null,
"required": true,
"step": null,
"type": "number",
"units": "requests",
"warning": null,
},
},
"type": "object",
"warning": null,
},
"enable": {
"default": true,
"description": "Allow remote RPC requests.",
"disabled": false,
"immutable": false,
"name": "Enable",
"type": "toggle",
"warning": null,
},
"password": {
"default": {
"charset": "a-z,2-7",
"len": 20,
},
"description": "The password for connecting to Bitcoin over RPC.",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": true,
"maxLength": null,
"minLength": null,
"name": "RPC Password",
"patterns": [
{
"description": "Must be alphanumeric (can contain underscore).",
"regex": "^[a-zA-Z0-9_]+$",
},
],
"placeholder": null,
"required": true,
"type": "text",
"warning": "You will need to restart all services that depend on Bitcoin.",
},
"username": {
"default": "bitcoin",
"description": "The username for connecting to Bitcoin over RPC.",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": true,
"maxLength": null,
"minLength": null,
"name": "Username",
"patterns": [
{
"description": "Must be alphanumeric (can contain underscore).",
"regex": "^[a-zA-Z0-9_]+$",
},
],
"placeholder": null,
"required": true,
"type": "text",
"warning": "You will need to restart all services that depend on Bitcoin.",
},
},
"type": "object",
"warning": null,
},
"txindex": {
"default": true,
"description": "By enabling Transaction Index (txindex) Bitcoin Core will build a complete transaction index. This allows Bitcoin Core to access any transaction with commands like \`gettransaction\`.",
"disabled": false,
"immutable": false,
"name": "Transaction Index",
"type": "toggle",
"warning": null,
},
"wallet": {
"description": "Wallet Settings",
"name": "Wallet",
"spec": {
"avoidpartialspends": {
"default": true,
"description": "Group outputs by address, selecting all or none, instead of selecting on a per-output basis. This improves privacy at the expense of higher transaction fees.",
"disabled": false,
"immutable": false,
"name": "Avoid Partial Spends",
"type": "toggle",
"warning": null,
},
"discardfee": {
"default": 0.0001,
"description": "The fee rate (in BTC/kB) that indicates your tolerance for discarding change by adding it to the fee.",
"disabled": false,
"immutable": false,
"integer": false,
"max": 0.01,
"min": null,
"name": "Discard Change Tolerance",
"placeholder": null,
"required": true,
"step": null,
"type": "number",
"units": "BTC/kB",
"warning": null,
},
"enable": {
"default": true,
"description": "Load the wallet and enable wallet RPC calls.",
"disabled": false,
"immutable": false,
"name": "Enable Wallet",
"type": "toggle",
"warning": null,
},
},
"type": "object",
"warning": null,
},
"zmq-enabled": {
"default": true,
"description": "The ZeroMQ interface is useful for some applications which might require data related to block and transaction events from Bitcoin Core. For example, LND requires ZeroMQ be enabled for LND to get the latest block data",
"disabled": false,
"immutable": false,
"name": "ZeroMQ Enabled",
"type": "toggle",
"warning": null,
},
}
`;
exports[`transformConfigSpec transformConfigSpec(embassyPages) 1`] = `
{
"homepage": {
"default": "welcome",
"description": null,
"disabled": false,
"immutable": false,
"name": "Type",
"required": true,
"type": "union",
"variants": {
"index": {
"name": "Table of Contents",
"spec": {},
},
"redirect": {
"name": "Redirect",
"spec": {
"target": {
"default": null,
"description": "The name of the subdomain to redirect users to. This must be a valid subdomain site within your Start9 Pages.",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"name": "Target Subdomain",
"patterns": [
{
"description": "May contain only lowercase characters and hyphens.",
"regex": "^[a-z-]+$",
},
],
"placeholder": null,
"required": true,
"type": "text",
"warning": null,
},
},
},
"web-page": {
"name": "Web Page",
"spec": {
"folder": {
"default": null,
"description": "The path to the folder that contains the static files of your website. For example, a value of "projects/resume" would tell Start9 Pages to look for that folder path in the selected service.",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"name": "Folder Path",
"patterns": [
{
"description": "Must be a valid relative file path",
"regex": "^(\\.|[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)(/[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|/([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)*/?$",
},
],
"placeholder": "e.g. websites/resume",
"required": true,
"type": "text",
"warning": null,
},
"source": {
"default": "nextcloud",
"description": "The service that contains your website files.",
"disabled": false,
"immutable": false,
"name": "Folder Location",
"required": false,
"type": "select",
"values": {
"filebrowser": "filebrowser",
"nextcloud": "nextcloud",
},
"warning": null,
},
},
},
"welcome": {
"name": "Welcome",
"spec": {},
},
},
"warning": null,
},
"subdomains": {
"default": [],
"description": "The websites you want to serve.",
"disabled": false,
"maxLength": null,
"minLength": null,
"name": "Subdomains",
"spec": {
"displayAs": "{{name}}",
"spec": {
"name": {
"default": null,
"description": "The subdomain of your Start9 Pages .onion address to host the website on. For example, a value of "me" would produce a website hosted at http://me.xxxxxx.onion.",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"name": "Subdomain name",
"patterns": [
{
"description": "May contain only lowercase characters and hyphens",
"regex": "^[a-z-]+$",
},
],
"placeholder": null,
"required": true,
"type": "text",
"warning": null,
},
"settings": {
"default": "web-page",
"description": null,
"disabled": false,
"immutable": false,
"name": "Type",
"required": true,
"type": "union",
"variants": {
"redirect": {
"name": "Redirect",
"spec": {
"target": {
"default": null,
"description": "The subdomain of your Start9 Pages .onion address to redirect to. This should be the name of another subdomain on Start9 Pages. Leave empty to redirect to the homepage.",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"name": "Target Subdomain",
"patterns": [
{
"description": "May contain only lowercase characters and hyphens.",
"regex": "^[a-z-]+$",
},
],
"placeholder": null,
"required": true,
"type": "text",
"warning": null,
},
},
},
"web-page": {
"name": "Web Page",
"spec": {
"folder": {
"default": null,
"description": "The path to the folder that contains the website files. For example, a value of "projects/resume" would tell Start9 Pages to look for that folder path in the selected service.",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"name": "Folder Path",
"patterns": [
{
"description": "Must be a valid relative file path",
"regex": "^(\\.|[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)(/[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|/([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)*/?$",
},
],
"placeholder": "e.g. websites/resume",
"required": true,
"type": "text",
"warning": null,
},
"source": {
"default": "nextcloud",
"description": "The service that contains your website files.",
"disabled": false,
"immutable": false,
"name": "Folder Location",
"required": false,
"type": "select",
"values": {
"filebrowser": "filebrowser",
"nextcloud": "nextcloud",
},
"warning": null,
},
},
},
},
"warning": null,
},
},
"type": "object",
"uniqueBy": "name",
},
"type": "list",
"warning": null,
},
}
`;
exports[`transformConfigSpec transformConfigSpec(nostr) 1`] = `
{
"nostr-relay": {
"default": "wss://relay.getalby.com/v1",
"description": "The Nostr Relay to use for Nostr Wallet Connect connections",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"name": "Nostr Relay",
"patterns": [],
"placeholder": null,
"required": true,
"type": "text",
"warning": null,
},
}
`;
exports[`transformConfigSpec transformConfigSpec(searNXG) 1`] = `
{
"enable-metrics": {
"default": true,
"description": "Your SearXNG instance will collect anonymous stats about its own usage and performance. You can view these metrics by appending \`/stats\` or \`/stats/errors\` to your SearXNG URL.",
"disabled": false,
"immutable": false,
"name": "Enable Stats",
"type": "toggle",
"warning": null,
},
"instance-name": {
"default": "My SearXNG Engine",
"description": "Enter a name for your SearXNG instance. This is the name that will be listed if you want to share your SearXNG engine publicly.",
"disabled": false,
"generate": null,
"immutable": false,
"inputmode": "text",
"masked": false,
"maxLength": null,
"minLength": null,
"name": "SearXNG Instance Name",
"patterns": [],
"placeholder": "Uncle Jim SearXNG Engine",
"required": true,
"type": "text",
"warning": null,
},
"tor-url": {
"default": false,
"description": "Activates the utilization of a .onion address as the primary URL, particularly beneficial for publicly hosted instances over the Tor network.",
"disabled": false,
"immutable": false,
"name": "Enable Tor address as the base URL",
"type": "toggle",
"warning": null,
},
}
`;

View File

@@ -45,9 +45,11 @@ import {
OldConfigSpec, OldConfigSpec,
matchOldConfigSpec, matchOldConfigSpec,
transformConfigSpec, transformConfigSpec,
transformNewConfigToOld,
transformOldConfigToNew, transformOldConfigToNew,
} from "./transformConfigSpec" } from "./transformConfigSpec"
import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk" import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk"
import { StorePath } from "@start9labs/start-sdk/cjs/lib/store/PathBuilder"
type Optional<A> = A | undefined | null type Optional<A> = A | undefined | null
function todo(): never { function todo(): never {
@@ -57,7 +59,7 @@ const execFile = promisify(childProcess.execFile)
const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json" const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json"
export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js" export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js"
const EMBASSY_POINTER_PATH_PREFIX = "/embassyConfig" const EMBASSY_POINTER_PATH_PREFIX = "/embassyConfig" as StorePath
const matchSetResult = object( const matchSetResult = object(
{ {
@@ -101,6 +103,11 @@ const matchSetResult = object(
["depends-on", "dependsOn"], ["depends-on", "dependsOn"],
) )
type OldGetConfigRes = {
config?: null | Record<string, unknown>
spec: OldConfigSpec
}
export type PackagePropertiesV2 = { export type PackagePropertiesV2 = {
[name: string]: PackagePropertyObject | PackagePropertyString [name: string]: PackagePropertyObject | PackagePropertyString
} }
@@ -232,7 +239,7 @@ export class SystemForEmbassy implements System {
effects: Effects, effects: Effects,
options: { options: {
procedure: JsonPath procedure: JsonPath
input: unknown input?: unknown
timeout?: number | undefined timeout?: number | undefined
}, },
): Promise<RpcResult> { ): Promise<RpcResult> {
@@ -294,7 +301,7 @@ export class SystemForEmbassy implements System {
effects: Effects, effects: Effects,
options: { options: {
procedure: JsonPath procedure: JsonPath
input: unknown input?: unknown
timeout?: number | undefined timeout?: number | undefined
}, },
): Promise<unknown> { ): Promise<unknown> {
@@ -342,12 +349,7 @@ export class SystemForEmbassy implements System {
options.timeout || null, options.timeout || null,
) )
case procedures[1] === "dependencies" && procedures[3] === "query": case procedures[1] === "dependencies" && procedures[3] === "query":
return this.dependenciesAutoconfig( return null
effects,
procedures[2],
input,
options.timeout || null,
)
case procedures[1] === "dependencies" && procedures[3] === "update": case procedures[1] === "dependencies" && procedures[3] === "update":
return this.dependenciesAutoconfig( return this.dependenciesAutoconfig(
@@ -444,7 +446,11 @@ export class SystemForEmbassy implements System {
description: interfaceValue.description, description: interfaceValue.description,
hasPrimary: false, hasPrimary: false,
disabled: false, disabled: false,
type: "api", type:
interfaceValue.ui &&
(origin.scheme === "http" || origin.sslScheme === "https")
? "ui"
: "api",
masked: false, masked: false,
path: "", path: "",
schemeOverride: null, schemeOverride: null,
@@ -542,14 +548,12 @@ export class SystemForEmbassy implements System {
effects: Effects, effects: Effects,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<T.ConfigRes> { ): Promise<T.ConfigRes> {
return this.getConfigUncleaned(effects, timeoutMs) return this.getConfigUncleaned(effects, timeoutMs).then(convertToNewConfig)
.then(removePointers)
.then(convertToNewConfig)
} }
private async getConfigUncleaned( private async getConfigUncleaned(
effects: Effects, effects: Effects,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<T.ConfigRes> { ): Promise<OldGetConfigRes> {
const config = this.manifest.config?.get const config = this.manifest.config?.get
if (!config) return { spec: {} } if (!config) return { spec: {} }
if (config.type === "docker") { if (config.type === "docker") {
@@ -574,7 +578,7 @@ export class SystemForEmbassy implements System {
if (!method) throw new Error("Expecting that the method getConfig exists") if (!method) throw new Error("Expecting that the method getConfig exists")
return (await method(polyfillEffects(effects, this.manifest)).then( return (await method(polyfillEffects(effects, this.manifest)).then(
(x) => { (x) => {
if ("result" in x) return x.result if ("result" in x) return JSON.parse(JSON.stringify(x.result))
if ("error" in x) throw new Error("Error getting config: " + x.error) if ("error" in x) throw new Error("Error getting config: " + x.error)
throw new Error("Error getting config: " + x["error-code"][1]) throw new Error("Error getting config: " + x["error-code"][1])
}, },
@@ -586,13 +590,18 @@ export class SystemForEmbassy implements System {
newConfigWithoutPointers: unknown, newConfigWithoutPointers: unknown,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<void> { ): Promise<void> {
const newConfig = structuredClone(newConfigWithoutPointers) const spec = await this.getConfigUncleaned(effects, timeoutMs).then(
await updateConfig( (x) => x.spec,
effects,
this.manifest,
await this.getConfigUncleaned(effects, timeoutMs).then((x) => x.spec),
newConfig,
) )
const newConfig = transformNewConfigToOld(
spec,
structuredClone(newConfigWithoutPointers as Record<string, unknown>),
)
await updateConfig(effects, this.manifest, spec, newConfig)
await effects.store.set({
path: EMBASSY_POINTER_PATH_PREFIX,
value: newConfig,
})
const setConfigValue = this.manifest.config?.set const setConfigValue = this.manifest.config?.set
if (!setConfigValue) return if (!setConfigValue) return
if (setConfigValue.type === "docker") { if (setConfigValue.type === "docker") {
@@ -822,68 +831,22 @@ export class SystemForEmbassy implements System {
})) as any })) as any
} }
} }
private async dependenciesCheck(
effects: Effects,
id: string,
oldConfig: unknown,
timeoutMs: number | null,
): Promise<object> {
const actionProcedure = this.manifest.dependencies?.[id]?.config?.check
if (!actionProcedure) return { message: "Action not found", value: null }
if (actionProcedure.type === "docker") {
const container = await DockerProcedureContainer.of(
effects,
this.manifest.id,
actionProcedure,
this.manifest.volumes,
)
return JSON.parse(
(
await container.execFail(
[
actionProcedure.entrypoint,
...actionProcedure.args,
JSON.stringify(oldConfig),
],
timeoutMs,
)
).stdout.toString(),
)
} else if (actionProcedure.type === "script") {
const moduleCode = await this.moduleCode
const method = moduleCode.dependencies?.[id]?.check
if (!method)
throw new Error(
`Expecting that the method dependency check ${id} exists`,
)
return (await method(
polyfillEffects(effects, this.manifest),
oldConfig as any,
).then((x) => {
if ("result" in x) return x.result
if ("error" in x) throw new Error("Error getting config: " + x.error)
throw new Error("Error getting config: " + x["error-code"][1])
})) as any
} else {
return {}
}
}
private async dependenciesAutoconfig( private async dependenciesAutoconfig(
effects: Effects, effects: Effects,
id: string, id: string,
oldConfig: unknown, input: unknown,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<void> { ): Promise<void> {
const oldConfig = object({ remoteConfig: any }).unsafeCast(
input,
).remoteConfig
// TODO: docker // TODO: docker
const moduleCode = await this.moduleCode const moduleCode = await this.moduleCode
const method = moduleCode.dependencies?.[id]?.autoConfigure const method = moduleCode.dependencies?.[id]?.autoConfigure
if (!method) if (!method) return
throw new Error(
`Expecting that the method dependency autoConfigure ${id} exists`,
)
return (await method( return (await method(
polyfillEffects(effects, this.manifest), polyfillEffects(effects, this.manifest),
oldConfig as any, oldConfig,
).then((x) => { ).then((x) => {
if ("result" in x) return x.result if ("result" in x) return x.result
if ("error" in x) throw new Error("Error getting config: " + x.error) if ("error" in x) throw new Error("Error getting config: " + x.error)
@@ -891,14 +854,6 @@ export class SystemForEmbassy implements System {
})) as any })) as any
} }
} }
async function removePointers(value: T.ConfigRes): Promise<T.ConfigRes> {
const startingSpec = structuredClone(value.spec)
const config =
value.config && cleanConfigFromPointers(value.config, startingSpec)
const spec = cleanSpecOfPointers(startingSpec)
return { config, spec }
}
const matchPointer = object({ const matchPointer = object({
type: literal("pointer"), type: literal("pointer"),
@@ -958,108 +913,99 @@ type CleanConfigFromPointers<C, S> =
} : } :
null null
function cleanConfigFromPointers<C, S>(
config: C,
spec: S,
): CleanConfigFromPointers<C, S> {
const newConfig = {} as CleanConfigFromPointers<C, S>
if (!(object.test(config) && object.test(spec)) || newConfig == null)
return null as CleanConfigFromPointers<C, S>
for (const key of Object.keys(spec)) {
if (!isKeyOf(key, spec)) continue
if (!isKeyOf(key, config)) continue
const partSpec = spec[key]
if (matchPointer.test(partSpec)) continue
;(newConfig as any)[key] = matchSpec.test(partSpec)
? cleanConfigFromPointers(config[key], partSpec.spec)
: config[key]
}
return newConfig as CleanConfigFromPointers<C, S>
}
async function updateConfig( async function updateConfig(
effects: Effects, effects: Effects,
manifest: Manifest, manifest: Manifest,
spec: unknown, spec: OldConfigSpec,
mutConfigValue: unknown, mutConfigValue: Record<string, unknown>,
) { ) {
if (!dictionary([string, unknown]).test(spec)) return
if (!dictionary([string, unknown]).test(mutConfigValue)) return
for (const key in spec) { for (const key in spec) {
const specValue = spec[key] const specValue = spec[key]
const newConfigValue = mutConfigValue[key] if (specValue.type === "object") {
if (matchSpec.test(specValue)) {
const updateObject = { spec: newConfigValue }
await updateConfig( await updateConfig(
effects, effects,
manifest, manifest,
{ spec: specValue.spec }, specValue.spec as OldConfigSpec,
updateObject, mutConfigValue[key] as Record<string, unknown>,
) )
mutConfigValue[key] = updateObject.spec } else if (specValue.type === "list" && specValue.subtype === "object") {
} const list = mutConfigValue[key] as unknown[]
if ( for (let val of list) {
matchVariants.test(specValue) && await updateConfig(
object({ tag: object({ id: string }) }).test(newConfigValue) && effects,
newConfigValue.tag.id in specValue.variants manifest,
) { { ...(specValue.spec as any), type: "object" as const },
// Not going to do anything on the variants... val as Record<string, unknown>,
} )
if (!matchPointer.test(specValue)) continue }
if (matchPointerConfig.test(specValue)) { } else if (specValue.type === "union") {
const configValue = (await effects.store.get({ const union = mutConfigValue[key] as Record<string, unknown>
packageId: specValue["package-id"], await updateConfig(
callback() {}, effects,
path: `${EMBASSY_POINTER_PATH_PREFIX}${specValue.selector}` as any,
})) as any
mutConfigValue[key] = configValue
}
if (matchPointerPackage.test(specValue)) {
if (specValue.target === "tor-key")
throw new Error("This service uses an unsupported target TorKey")
const specInterface = specValue.interface
const serviceInterfaceId = extractServiceInterfaceId(
manifest, manifest,
specInterface, specValue.variants[union[specValue.tag.id] as string] as OldConfigSpec,
mutConfigValue[key] as Record<string, unknown>,
) )
if (!serviceInterfaceId) { } else if (
mutConfigValue[key] = "" specValue.type === "pointer" &&
return specValue.subtype === "package"
} ) {
const filled = await utils if (specValue.target === "config") {
.getServiceInterface(effects, { const jp = require("jsonpath")
const remoteConfig = await effects.store.get({
packageId: specValue["package-id"], packageId: specValue["package-id"],
id: serviceInterfaceId, callback: () => effects.restart(),
path: EMBASSY_POINTER_PATH_PREFIX,
}) })
.once() console.debug(remoteConfig)
.catch((x) => { const configValue = specValue.multi
console.error("Could not get the service interface", x) ? jp.query(remoteConfig, specValue.selector)
return null : jp.query(remoteConfig, specValue.selector, 1)[0]
}) mutConfigValue[key] = configValue === undefined ? null : configValue
const catchFn = <X>(fn: () => X) => { } else if (specValue.target === "tor-key") {
try { throw new Error("This service uses an unsupported target TorKey")
return fn() } else {
} catch (e) { const specInterface = specValue.interface
return undefined const serviceInterfaceId = extractServiceInterfaceId(
manifest,
specInterface,
)
if (!serviceInterfaceId) {
mutConfigValue[key] = ""
return
} }
const filled = await utils
.getServiceInterface(effects, {
packageId: specValue["package-id"],
id: serviceInterfaceId,
})
.once()
.catch((x) => {
console.error("Could not get the service interface", x)
return null
})
const catchFn = <X>(fn: () => X) => {
try {
return fn()
} catch (e) {
return undefined
}
}
const url: string =
filled === null || filled.addressInfo === null
? ""
: catchFn(() =>
utils.hostnameInfoToAddress(
specValue.target === "lan-address"
? filled.addressInfo!.localHostnames[0] ||
filled.addressInfo!.onionHostnames[0]
: filled.addressInfo!.onionHostnames[0] ||
filled.addressInfo!.localHostnames[0],
),
) || ""
mutConfigValue[key] = url
} }
const url: string =
filled === null || filled.addressInfo === null
? ""
: catchFn(() =>
utils.hostnameInfoToAddress(
specValue.target === "lan-address"
? filled.addressInfo!.localHostnames[0] ||
filled.addressInfo!.onionHostnames[0]
: filled.addressInfo!.onionHostnames[0] ||
filled.addressInfo!.localHostnames[0],
),
) || ""
mutConfigValue[key] = url
} }
} }
} }
@@ -1077,7 +1023,9 @@ function extractServiceInterfaceId(manifest: Manifest, specInterface: string) {
const serviceInterfaceId = `${specInterface}-${internalPort}` const serviceInterfaceId = `${specInterface}-${internalPort}`
return serviceInterfaceId return serviceInterfaceId
} }
async function convertToNewConfig(value: T.ConfigRes): Promise<T.ConfigRes> { async function convertToNewConfig(
value: OldGetConfigRes,
): Promise<T.ConfigRes> {
const valueSpec: OldConfigSpec = matchOldConfigSpec.unsafeCast(value.spec) const valueSpec: OldConfigSpec = matchOldConfigSpec.unsafeCast(value.spec)
const spec = transformConfigSpec(valueSpec) const spec = transformConfigSpec(valueSpec)
if (!value.config) return { spec, config: null } if (!value.config) return { spec, config: null }

View File

@@ -0,0 +1,33 @@
import { matchOldConfigSpec, transformConfigSpec } from "./transformConfigSpec"
import fixtureEmbasyPagesConfig from "./__fixtures__/embasyPagesConfig"
import searNXG from "./__fixtures__/searNXG"
import bitcoind from "./__fixtures__/bitcoind"
import nostr from "./__fixtures__/nostr"
describe("transformConfigSpec", () => {
test("matchOldConfigSpec(embassyPages.homepage.variants[web-page])", () => {
matchOldConfigSpec.unsafeCast(
fixtureEmbasyPagesConfig.homepage.variants["web-page"],
)
})
test("matchOldConfigSpec(embassyPages)", () => {
matchOldConfigSpec.unsafeCast(fixtureEmbasyPagesConfig)
})
test("transformConfigSpec(embassyPages)", () => {
const spec = matchOldConfigSpec.unsafeCast(fixtureEmbasyPagesConfig)
expect(transformConfigSpec(spec)).toMatchSnapshot()
})
test("transformConfigSpec(searNXG)", () => {
const spec = matchOldConfigSpec.unsafeCast(searNXG)
expect(transformConfigSpec(spec)).toMatchSnapshot()
})
test("transformConfigSpec(bitcoind)", () => {
const spec = matchOldConfigSpec.unsafeCast(bitcoind)
expect(transformConfigSpec(spec)).toMatchSnapshot()
})
test("transformConfigSpec(nostr)", () => {
const spec = matchOldConfigSpec.unsafeCast(nostr)
expect(transformConfigSpec(spec)).toMatchSnapshot()
})
})

View File

@@ -12,6 +12,7 @@ import {
deferred, deferred,
every, every,
nill, nill,
literal,
} from "ts-matches" } from "ts-matches"
export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec { export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec {
@@ -38,7 +39,7 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec {
values: oldVal.values.reduce( values: oldVal.values.reduce(
(obj, curr) => ({ (obj, curr) => ({
...obj, ...obj,
[curr]: oldVal["value-names"][curr], [curr]: oldVal["value-names"][curr] || curr,
}), }),
{}, {},
), ),
@@ -104,12 +105,12 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec {
: [], : [],
minLength: null, minLength: null,
maxLength: null, maxLength: null,
masked: oldVal.masked, masked: oldVal.masked || false,
generate: null, generate: null,
inputmode: "text", inputmode: "text",
placeholder: oldVal.placeholder || null, placeholder: oldVal.placeholder || null,
} }
} else { } else if (oldVal.type === "union") {
newVal = { newVal = {
type: "union", type: "union",
name: oldVal.tag.name, name: oldVal.tag.name,
@@ -119,7 +120,7 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec {
(obj, [id, spec]) => ({ (obj, [id, spec]) => ({
...obj, ...obj,
[id]: { [id]: {
name: oldVal.tag["variant-names"][id], name: oldVal.tag["variant-names"][id] || id,
spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(spec)), spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(spec)),
}, },
}), }),
@@ -130,6 +131,10 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec {
default: oldVal.default, default: oldVal.default,
immutable: false, immutable: false,
} }
} else if (oldVal.type === "pointer") {
return inputSpec
} else {
throw new Error(`unknown spec ${JSON.stringify(oldVal)}`)
} }
return { return {
@@ -175,6 +180,10 @@ export function transformOldConfigToNew(
) )
} }
if (isPointer(val)) {
return obj
}
return { return {
...obj, ...obj,
[key]: newVal, [key]: newVal,
@@ -201,7 +210,7 @@ export function transformNewConfigToOld(
[val.tag.id]: config[key].selection, [val.tag.id]: config[key].selection,
...transformNewConfigToOld( ...transformNewConfigToOld(
matchOldConfigSpec.unsafeCast(val.variants[config[key].selection]), matchOldConfigSpec.unsafeCast(val.variants[config[key].selection]),
config[key].unionSelectValue, config[key].value,
), ),
} }
} }
@@ -276,7 +285,7 @@ function getListSpec(
: [], : [],
minLength: null, minLength: null,
maxLength: null, maxLength: null,
masked: oldVal.spec.masked, masked: oldVal.spec.masked || false,
generate: null, generate: null,
inputmode: "text", inputmode: "text",
placeholder: oldVal.spec.placeholder || null, placeholder: oldVal.spec.placeholder || null,
@@ -292,7 +301,7 @@ function getListSpec(
spec: transformConfigSpec( spec: transformConfigSpec(
matchOldConfigSpec.unsafeCast(oldVal.spec.spec), matchOldConfigSpec.unsafeCast(oldVal.spec.spec),
), ),
uniqueBy: oldVal.spec["unique-by"], uniqueBy: oldVal.spec["unique-by"] || null,
displayAs: oldVal.spec["display-as"] || null, displayAs: oldVal.spec["display-as"] || null,
}, },
} }
@@ -313,6 +322,10 @@ function isList(val: OldValueSpec): val is OldValueSpecList {
return val.type === "list" return val.type === "list"
} }
function isPointer(val: OldValueSpec): val is OldValueSpecPointer {
return val.type === "pointer"
}
function isEnumList( function isEnumList(
val: OldValueSpecList, val: OldValueSpecList,
): val is OldValueSpecList & { subtype: "enum" } { ): val is OldValueSpecList & { subtype: "enum" } {
@@ -347,11 +360,11 @@ type OldDefaultString = typeof matchOldDefaultString._TYPE
export const matchOldValueSpecString = object( export const matchOldValueSpecString = object(
{ {
type: literals("string"),
name: string,
masked: boolean, masked: boolean,
copyable: boolean, copyable: boolean,
type: literals("string"),
nullable: boolean, nullable: boolean,
name: string,
placeholder: string, placeholder: string,
pattern: string, pattern: string,
"pattern-description": string, "pattern-description": string,
@@ -361,6 +374,9 @@ export const matchOldValueSpecString = object(
warning: string, warning: string,
}, },
[ [
"masked",
"copyable",
"nullable",
"placeholder", "placeholder",
"pattern", "pattern",
"pattern-description", "pattern-description",
@@ -466,7 +482,7 @@ const matchOldListValueSpecObject = object(
"unique-by": matchOldUniqueBy, // indicates whether duplicates can be permitted in the list "unique-by": matchOldUniqueBy, // indicates whether duplicates can be permitted in the list
"display-as": string, // this should be a handlebars template which can make use of the entire config which corresponds to 'spec' "display-as": string, // this should be a handlebars template which can make use of the entire config which corresponds to 'spec'
}, },
["display-as"], ["display-as", "unique-by"],
) )
const matchOldListValueSpecString = object( const matchOldListValueSpecString = object(
{ {
@@ -476,7 +492,7 @@ const matchOldListValueSpecString = object(
"pattern-description": string, "pattern-description": string,
placeholder: string, placeholder: string,
}, },
["pattern", "pattern-description", "placeholder"], ["pattern", "pattern-description", "placeholder", "copyable", "masked"],
) )
const matchOldListValueSpecEnum = object({ const matchOldListValueSpecEnum = object({
@@ -519,6 +535,28 @@ const matchOldValueSpecList = every(
) )
type OldValueSpecList = typeof matchOldValueSpecList._TYPE type OldValueSpecList = typeof matchOldValueSpecList._TYPE
const matchOldValueSpecPointer = every(
object({
type: literal("pointer"),
}),
anyOf(
object({
subtype: literal("package"),
target: literals("tor-key", "tor-address", "lan-address"),
"package-id": string,
interface: string,
}),
object({
subtype: literal("package"),
target: literals("config"),
"package-id": string,
selector: string,
multi: boolean,
}),
),
)
type OldValueSpecPointer = typeof matchOldValueSpecPointer._TYPE
export const matchOldValueSpec = anyOf( export const matchOldValueSpec = anyOf(
matchOldValueSpecString, matchOldValueSpecString,
matchOldValueSpecNumber, matchOldValueSpecNumber,
@@ -527,6 +565,7 @@ export const matchOldValueSpec = anyOf(
matchOldValueSpecEnum, matchOldValueSpecEnum,
matchOldValueSpecList, matchOldValueSpecList,
matchOldValueSpecUnion, matchOldValueSpecUnion,
matchOldValueSpecPointer,
) )
type OldValueSpec = typeof matchOldValueSpec._TYPE type OldValueSpec = typeof matchOldValueSpec._TYPE

View File

@@ -75,7 +75,7 @@ export class SystemForStartOs implements System {
effects: Effects, effects: Effects,
options: { options: {
procedure: Procedure procedure: Procedure
input: unknown input?: unknown
timeout?: number | undefined timeout?: number | undefined
}, },
): Promise<RpcResult> { ): Promise<RpcResult> {
@@ -137,7 +137,7 @@ export class SystemForStartOs implements System {
effects: Effects | MainEffects, effects: Effects | MainEffects,
options: { options: {
procedure: Procedure procedure: Procedure
input: unknown input?: unknown
timeout?: number | undefined timeout?: number | undefined
}, },
): Promise<unknown> { ): Promise<unknown> {
@@ -219,7 +219,7 @@ export class SystemForStartOs implements System {
async sandbox( async sandbox(
effects: Effects, effects: Effects,
options: { procedure: Procedure; input: unknown; timeout?: number }, options: { procedure: Procedure; input?: unknown; timeout?: number },
): Promise<RpcResult> { ): Promise<RpcResult> {
return this.execute(effects, options) return this.execute(effects, options)
} }

View File

@@ -13,9 +13,10 @@
"declaration": true, "declaration": true,
"noImplicitAny": true, "noImplicitAny": true,
"esModuleInterop": true, "esModuleInterop": true,
"types": ["node"], "types": ["node", "jest"],
"moduleResolution": "Node16", "moduleResolution": "Node16",
"skipLibCheck": true "skipLibCheck": true,
"resolveJsonModule": true
}, },
"ts-node": { "ts-node": {
"compilerOptions": { "compilerOptions": {

View File

@@ -8,7 +8,11 @@ if mountpoint tmp/combined; then sudo umount -R tmp/combined; fi
if mountpoint tmp/lower; then sudo umount tmp/lower; fi if mountpoint tmp/lower; then sudo umount tmp/lower; fi
sudo rm -rf tmp sudo rm -rf tmp
mkdir -p tmp/lower tmp/upper tmp/work tmp/combined mkdir -p tmp/lower tmp/upper tmp/work tmp/combined
sudo mount -o loop -t squashfs debian.${ARCH}.squashfs tmp/lower if which squashfuse > /dev/null; then
sudo squashfuse debian.${ARCH}.squashfs tmp/lower
else
sudo mount debian.${ARCH}.squashfs tmp/lower
fi
sudo mount -t overlay -olowerdir=tmp/lower,upperdir=tmp/upper,workdir=tmp/work overlay tmp/combined sudo mount -t overlay -olowerdir=tmp/lower,upperdir=tmp/upper,workdir=tmp/work overlay tmp/combined
QEMU= QEMU=

251
core/Cargo.lock generated
View File

@@ -107,9 +107,9 @@ dependencies = [
[[package]] [[package]]
name = "anstream" name = "anstream"
version = "0.6.14" version = "0.6.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"anstyle-parse", "anstyle-parse",
@@ -122,33 +122,33 @@ dependencies = [
[[package]] [[package]]
name = "anstyle" name = "anstyle"
version = "1.0.7" version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
[[package]] [[package]]
name = "anstyle-parse" name = "anstyle-parse"
version = "0.2.4" version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb"
dependencies = [ dependencies = [
"utf8parse", "utf8parse",
] ]
[[package]] [[package]]
name = "anstyle-query" name = "anstyle-query"
version = "1.1.0" version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a"
dependencies = [ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
name = "anstyle-wincon" name = "anstyle-wincon"
version = "3.0.3" version = "3.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"windows-sys 0.52.0", "windows-sys 0.52.0",
@@ -162,9 +162,9 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]] [[package]]
name = "arrayref" name = "arrayref"
version = "0.3.7" version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a"
[[package]] [[package]]
name = "arrayvec" name = "arrayvec"
@@ -200,9 +200,9 @@ dependencies = [
[[package]] [[package]]
name = "async-compression" name = "async-compression"
version = "0.4.11" version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd066d0b4ef8ecb03a55319dc13aa6910616d0f44008a045bb1835af830abff5" checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa"
dependencies = [ dependencies = [
"brotli", "brotli",
"flate2", "flate2",
@@ -231,7 +231,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -242,7 +242,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -268,9 +268,9 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
[[package]] [[package]]
name = "aws-lc-rs" name = "aws-lc-rs"
version = "1.8.0" version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8a47f2fb521b70c11ce7369a6c5fa4bd6af7e5d62ec06303875bafe7c6ba245" checksum = "4ae74d9bd0a7530e8afd1770739ad34b36838829d6ad61818f9230f683f5ad77"
dependencies = [ dependencies = [
"aws-lc-sys", "aws-lc-sys",
"mirai-annotations", "mirai-annotations",
@@ -280,9 +280,9 @@ dependencies = [
[[package]] [[package]]
name = "aws-lc-sys" name = "aws-lc-sys"
version = "0.19.0" version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2927c7af777b460b7ccd95f8b67acd7b4c04ec8896bf0c8e80ba30523cffc057" checksum = "2e89b6941c2d1a7045538884d6e760ccfffdf8e1ffc2613d8efa74305e1f3752"
dependencies = [ dependencies = [
"bindgen", "bindgen",
"cc", "cc",
@@ -532,7 +532,7 @@ dependencies = [
"regex", "regex",
"rustc-hash", "rustc-hash",
"shlex", "shlex",
"syn 2.0.71", "syn 2.0.72",
"which", "which",
] ]
@@ -687,9 +687,9 @@ checksum = "981520c98f422fcc584dc1a95c334e6953900b9106bc47a9839b81790009eb21"
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.1.5" version = "1.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "324c74f2155653c90b04f25b2a47a8a631360cb908f92a772695f430c7e31052" checksum = "2aba8f4e9906c7ce3c73463f62a7f0c65183ada1a2d47e397cc8810827f9694f"
dependencies = [ dependencies = [
"jobserver", "jobserver",
"libc", "libc",
@@ -799,9 +799,9 @@ dependencies = [
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.9" version = "4.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462" checksum = "35723e6a11662c2afb578bcf0b88bf6ea8e21282a953428f240574fcc3a2b5b3"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive", "clap_derive",
@@ -809,9 +809,9 @@ dependencies = [
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.5.9" version = "4.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942" checksum = "49eb96cbfa7cfa35017b7cd548c75b14c3118c98b423041d70562665e07fb0fa"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
@@ -821,21 +821,21 @@ dependencies = [
[[package]] [[package]]
name = "clap_derive" name = "clap_derive"
version = "4.5.8" version = "4.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" checksum = "5d029b67f89d30bbb547c89fd5161293c0aec155fc691d7924b64550662db93e"
dependencies = [ dependencies = [
"heck 0.5.0", "heck 0.5.0",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
name = "clap_lex" name = "clap_lex"
version = "0.7.1" version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
[[package]] [[package]]
name = "cmake" name = "cmake"
@@ -875,9 +875,9 @@ dependencies = [
[[package]] [[package]]
name = "colorchoice" name = "colorchoice"
version = "1.0.1" version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0"
[[package]] [[package]]
name = "concurrent-queue" name = "concurrent-queue"
@@ -956,18 +956,18 @@ checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]] [[package]]
name = "const_format" name = "const_format"
version = "0.2.32" version = "0.2.31"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" checksum = "c990efc7a285731f9a4378d81aff2f0e85a2c8781a05ef0f8baa8dac54d0ff48"
dependencies = [ dependencies = [
"const_format_proc_macros", "const_format_proc_macros",
] ]
[[package]] [[package]]
name = "const_format_proc_macros" name = "const_format_proc_macros"
version = "0.2.32" version = "0.2.31"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" checksum = "e026b6ce194a874cb9cf32cd5772d1ef9767cc8fcb5765948d74f37a9d8b2bf6"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1125,7 +1125,7 @@ dependencies = [
"crossterm_winapi", "crossterm_winapi",
"futures-core", "futures-core",
"libc", "libc",
"mio", "mio 0.8.11",
"parking_lot", "parking_lot",
"signal-hook", "signal-hook",
"signal-hook-mio", "signal-hook-mio",
@@ -1236,7 +1236,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -1260,7 +1260,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"strsim 0.11.1", "strsim 0.11.1",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -1271,7 +1271,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [ dependencies = [
"darling_core", "darling_core",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -1302,7 +1302,7 @@ dependencies = [
"proc-macro-crate", "proc-macro-crate",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -1325,7 +1325,7 @@ checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -1348,7 +1348,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"rustc_version", "rustc_version",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -1570,7 +1570,7 @@ dependencies = [
"heck 0.4.1", "heck 0.4.1",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -1828,7 +1828,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -2236,7 +2236,7 @@ dependencies = [
"http 1.1.0", "http 1.1.0",
"hyper 1.4.1", "hyper 1.4.1",
"hyper-util", "hyper-util",
"rustls 0.23.11", "rustls 0.23.12",
"rustls-pki-types", "rustls-pki-types",
"tokio", "tokio",
"tokio-rustls", "tokio-rustls",
@@ -2515,9 +2515,9 @@ dependencies = [
[[package]] [[package]]
name = "is_terminal_polyfill" name = "is_terminal_polyfill"
version = "1.70.0" version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]] [[package]]
name = "isocountry" name = "isocountry"
@@ -2620,21 +2620,21 @@ dependencies = [
[[package]] [[package]]
name = "jobserver" name = "jobserver"
version = "0.1.31" version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0"
dependencies = [ dependencies = [
"libc", "libc",
] ]
[[package]] [[package]]
name = "josekit" name = "josekit"
version = "0.8.6" version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0953340cf63354cec4a385f1fbcb3f409a5823778cae236078892f6030ed4565" checksum = "54b85e2125819afc4fd2ae57416207e792c7e12797858e5db2a6c6f24a166829"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"base64 0.21.7", "base64 0.22.1",
"flate2", "flate2",
"once_cell", "once_cell",
"openssl", "openssl",
@@ -2777,9 +2777,9 @@ checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
[[package]] [[package]]
name = "libloading" name = "libloading"
version = "0.8.4" version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e310b3a6b5907f99202fcdb4960ff45b93735d7c7d96b760fcff8db2dc0e103d" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"windows-targets 0.52.6", "windows-targets 0.52.6",
@@ -2955,6 +2955,18 @@ dependencies = [
"windows-sys 0.48.0", "windows-sys 0.48.0",
] ]
[[package]]
name = "mio"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4"
dependencies = [
"hermit-abi",
"libc",
"wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys 0.52.0",
]
[[package]] [[package]]
name = "mirai-annotations" name = "mirai-annotations"
version = "1.12.0" version = "1.12.0"
@@ -3229,7 +3241,7 @@ dependencies = [
"proc-macro-crate", "proc-macro-crate",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -3274,9 +3286,9 @@ dependencies = [
[[package]] [[package]]
name = "openssl" name = "openssl"
version = "0.10.64" version = "0.10.66"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1"
dependencies = [ dependencies = [
"bitflags 2.6.0", "bitflags 2.6.0",
"cfg-if", "cfg-if",
@@ -3295,7 +3307,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -3315,9 +3327,9 @@ dependencies = [
[[package]] [[package]]
name = "openssl-sys" name = "openssl-sys"
version = "0.9.102" version = "0.9.103"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6"
dependencies = [ dependencies = [
"cc", "cc",
"libc", "libc",
@@ -3502,7 +3514,7 @@ dependencies = [
"pest_meta", "pest_meta",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -3558,7 +3570,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -3602,9 +3614,9 @@ checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
[[package]] [[package]]
name = "portable-atomic" name = "portable-atomic"
version = "1.6.0" version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265"
[[package]] [[package]]
name = "powerfmt" name = "powerfmt"
@@ -3631,7 +3643,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -3703,7 +3715,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -3726,7 +3738,7 @@ dependencies = [
"itertools 0.12.1", "itertools 0.12.1",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -4181,15 +4193,15 @@ dependencies = [
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.23.11" version = "0.23.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4828ea528154ae444e5a642dbb7d5623354030dc9822b83fd9bb79683c7399d0" checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044"
dependencies = [ dependencies = [
"aws-lc-rs", "aws-lc-rs",
"log", "log",
"once_cell", "once_cell",
"rustls-pki-types", "rustls-pki-types",
"rustls-webpki 0.102.5", "rustls-webpki 0.102.6",
"subtle", "subtle",
"zeroize", "zeroize",
] ]
@@ -4231,9 +4243,9 @@ dependencies = [
[[package]] [[package]]
name = "rustls-webpki" name = "rustls-webpki"
version = "0.102.5" version = "0.102.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9a6fccd794a42c2c105b513a2f62bc3fd8f3ba57a4593677ceb0bd035164d78" checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e"
dependencies = [ dependencies = [
"aws-lc-rs", "aws-lc-rs",
"ring", "ring",
@@ -4395,7 +4407,7 @@ checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -4422,9 +4434,9 @@ dependencies = [
[[package]] [[package]]
name = "serde_spanned" name = "serde_spanned"
version = "0.6.6" version = "0.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
dependencies = [ dependencies = [
"serde", "serde",
] ]
@@ -4468,7 +4480,7 @@ dependencies = [
"darling", "darling",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -4571,7 +4583,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af"
dependencies = [ dependencies = [
"libc", "libc",
"mio", "mio 0.8.11",
"signal-hook", "signal-hook",
] ]
@@ -4880,9 +4892,9 @@ dependencies = [
[[package]] [[package]]
name = "sscanf" name = "sscanf"
version = "0.4.1" version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c713ebd15ce561dd4a13ed62bc2a0368e16806fc30dcaf66ecf1256b2a3fdde6" checksum = "a147d3cf7e723671ed11355b5b008c8019195f7fc902e213f5557d931e9f839d"
dependencies = [ dependencies = [
"const_format", "const_format",
"lazy_static", "lazy_static",
@@ -4892,16 +4904,16 @@ dependencies = [
[[package]] [[package]]
name = "sscanf_macro" name = "sscanf_macro"
version = "0.4.1" version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84955aa74a157e5834d58a07be11af7f0ab923f0194a0bb2ea6b3db8b5d1611d" checksum = "af3a37bdf8e90e77cc60f74473edf28d922ae2eacdd595e67724ccd2381774cc"
dependencies = [ dependencies = [
"convert_case 0.6.0", "convert_case 0.6.0",
"proc-macro2", "proc-macro2",
"quote", "quote",
"regex-syntax 0.6.29", "regex-syntax 0.6.29",
"strsim 0.10.0", "strsim 0.10.0",
"syn 2.0.71", "syn 2.0.72",
"unicode-width", "unicode-width",
] ]
@@ -4949,7 +4961,7 @@ dependencies = [
[[package]] [[package]]
name = "start-os" name = "start-os"
version = "0.3.6-alpha.0" version = "0.3.6-alpha.3"
dependencies = [ dependencies = [
"aes", "aes",
"async-compression", "async-compression",
@@ -5060,7 +5072,7 @@ dependencies = [
"tokio-tar", "tokio-tar",
"tokio-tungstenite 0.23.1", "tokio-tungstenite 0.23.1",
"tokio-util", "tokio-util",
"toml 0.8.15", "toml 0.8.16",
"torut", "torut",
"tower-service", "tower-service",
"tracing", "tracing",
@@ -5138,9 +5150,9 @@ dependencies = [
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.71" version = "2.0.72"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b146dcf730474b4bcd16c311627b31ede9ab149045db4d6088b3becaea046462" checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -5267,7 +5279,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -5348,22 +5360,21 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.38.1" version = "1.39.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb2caba9f80616f438e09748d5acda951967e1ea58508ef53d9c6402485a46df" checksum = "d040ac2b29ab03b09d4129c2f5bbd012a3ac2f79d38ff506a4bf8dd34b0eac8a"
dependencies = [ dependencies = [
"backtrace", "backtrace",
"bytes", "bytes",
"libc", "libc",
"mio", "mio 1.0.1",
"num_cpus",
"parking_lot", "parking_lot",
"pin-project-lite", "pin-project-lite",
"signal-hook-registry", "signal-hook-registry",
"socket2", "socket2",
"tokio-macros", "tokio-macros",
"tracing", "tracing",
"windows-sys 0.48.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -5378,13 +5389,13 @@ dependencies = [
[[package]] [[package]]
name = "tokio-macros" name = "tokio-macros"
version = "2.3.0" version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -5403,7 +5414,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4"
dependencies = [ dependencies = [
"rustls 0.23.11", "rustls 0.23.12",
"rustls-pki-types", "rustls-pki-types",
"tokio", "tokio",
] ]
@@ -5499,21 +5510,21 @@ dependencies = [
[[package]] [[package]]
name = "toml" name = "toml"
version = "0.8.15" version = "0.8.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac2caab0bf757388c6c0ae23b3293fdb463fee59434529014f85e3263b995c28" checksum = "81967dd0dd2c1ab0bc3468bd7caecc32b8a4aa47d0c8c695d8c2b2108168d62c"
dependencies = [ dependencies = [
"serde", "serde",
"serde_spanned", "serde_spanned",
"toml_datetime", "toml_datetime",
"toml_edit 0.22.16", "toml_edit 0.22.17",
] ]
[[package]] [[package]]
name = "toml_datetime" name = "toml_datetime"
version = "0.6.6" version = "0.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" checksum = "f8fb9f64314842840f1d940ac544da178732128f1c78c21772e876579e0da1db"
dependencies = [ dependencies = [
"serde", "serde",
] ]
@@ -5544,15 +5555,15 @@ dependencies = [
[[package]] [[package]]
name = "toml_edit" name = "toml_edit"
version = "0.22.16" version = "0.22.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "278f3d518e152219c994ce877758516bca5e118eaed6996192a774fb9fbf0788" checksum = "8d9f8729f5aea9562aac1cc0441f5d6de3cff1ee0c5d67293eeca5eb36ee7c16"
dependencies = [ dependencies = [
"indexmap 2.2.6", "indexmap 2.2.6",
"serde", "serde",
"serde_spanned", "serde_spanned",
"toml_datetime", "toml_datetime",
"winnow 0.6.13", "winnow 0.6.16",
] ]
[[package]] [[package]]
@@ -5653,7 +5664,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -5805,7 +5816,7 @@ dependencies = [
"Inflector", "Inflector",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
"termcolor", "termcolor",
] ]
@@ -5865,7 +5876,7 @@ checksum = "1f718dfaf347dcb5b983bfc87608144b0bad87970aebcbea5ce44d2a30c08e63"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -5936,9 +5947,9 @@ checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
[[package]] [[package]]
name = "unicode-width" name = "unicode-width"
version = "0.1.13" version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6"
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
@@ -6011,9 +6022,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]] [[package]]
name = "version_check" name = "version_check"
version = "0.9.4" version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]] [[package]]
name = "wait-timeout" name = "wait-timeout"
@@ -6082,7 +6093,7 @@ dependencies = [
"once_cell", "once_cell",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@@ -6116,7 +6127,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
"wasm-bindgen-backend", "wasm-bindgen-backend",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@@ -6368,9 +6379,9 @@ dependencies = [
[[package]] [[package]]
name = "winnow" name = "winnow"
version = "0.6.13" version = "0.6.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1" checksum = "b480ae9340fc261e6be3e95a1ba86d54ae3f9171132a73ce8d4bbaf68339507c"
dependencies = [ dependencies = [
"memchr", "memchr",
] ]
@@ -6470,7 +6481,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]
@@ -6490,7 +6501,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.71", "syn 2.0.72",
] ]
[[package]] [[package]]

View File

@@ -28,15 +28,12 @@ set +e
fail= fail=
echo "FEATURES=\"$FEATURES\"" echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "RUSTFLAGS=\"$RUSTFLAGS\""
if ! rust-musl-builder sh -c "(cd core && cargo build --release --no-default-features --features container-runtime,$FEATURES --locked --bin containerbox --target=$ARCH-unknown-linux-musl)"; then if ! rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features container-runtime,$FEATURES --locked --bin containerbox --target=$ARCH-unknown-linux-musl && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"; then
fail=true fail=true
fi fi
set -e set -e
cd core cd core
sudo chown -R $USER target
sudo chown -R $USER ~/.cargo
if [ -n "$fail" ]; then if [ -n "$fail" ]; then
exit 1 exit 1
fi fi

View File

@@ -28,15 +28,12 @@ set +e
fail= fail=
echo "FEATURES=\"$FEATURES\"" echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "RUSTFLAGS=\"$RUSTFLAGS\""
if ! rust-musl-builder sh -c "(cd core && cargo build --release --no-default-features --features cli,registry,$FEATURES --locked --bin registrybox --target=$ARCH-unknown-linux-musl)"; then if ! rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli,registry,$FEATURES --locked --bin registrybox --target=$ARCH-unknown-linux-musl && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"; then
fail=true fail=true
fi fi
set -e set -e
cd core cd core
sudo chown -R $USER target
sudo chown -R $USER ~/.cargo
if [ -n "$fail" ]; then if [ -n "$fail" ]; then
exit 1 exit 1
fi fi

View File

@@ -28,15 +28,12 @@ set +e
fail= fail=
echo "FEATURES=\"$FEATURES\"" echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "RUSTFLAGS=\"$RUSTFLAGS\""
if ! rust-musl-builder sh -c "(cd core && cargo build --release --no-default-features --features cli,daemon,$FEATURES --locked --bin startbox --target=$ARCH-unknown-linux-musl)"; then if ! rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli,daemon,$FEATURES --locked --bin startbox --target=$ARCH-unknown-linux-musl && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"; then
fail=true fail=true
fi fi
set -e set -e
cd core cd core
sudo chown -R $USER target
sudo chown -R $USER ~/.cargo
if [ -n "$fail" ]; then if [ -n "$fail" ]; then
exit 1 exit 1
fi fi

View File

@@ -50,7 +50,8 @@ pub async fn canonicalize(
} }
let path = path.as_ref(); let path = path.as_ref();
if tokio::fs::metadata(path).await.is_err() { if tokio::fs::metadata(path).await.is_err() {
if let (Some(parent), Some(file_name)) = (path.parent(), path.file_name()) { let parent = path.parent().unwrap_or(Path::new("."));
if let Some(file_name) = path.file_name() {
if create_parent && tokio::fs::metadata(parent).await.is_err() { if create_parent && tokio::fs::metadata(parent).await.is_err() {
return Ok(create_canonical_folder(parent).await?.join(file_name)); return Ok(create_canonical_folder(parent).await?.join(file_name));
} else { } else {

View File

@@ -490,6 +490,7 @@ where
{ {
fn with_kind(self, kind: ErrorKind) -> Result<T, Error>; fn with_kind(self, kind: ErrorKind) -> Result<T, Error>;
fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display>(self, f: F) -> Result<T, Error>; fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display>(self, f: F) -> Result<T, Error>;
fn log_err(self) -> Option<T>;
} }
impl<T, E> ResultExt<T, E> for Result<T, E> impl<T, E> ResultExt<T, E> for Result<T, E>
where where
@@ -516,6 +517,18 @@ where
} }
}) })
} }
fn log_err(self) -> Option<T> {
match self {
Ok(a) => Some(a),
Err(e) => {
let e: color_eyre::eyre::Error = e.into();
tracing::error!("{e}");
tracing::debug!("{e:?}");
None
}
}
}
} }
impl<T> ResultExt<T, Error> for Result<T, Error> { impl<T> ResultExt<T, Error> for Result<T, Error> {
fn with_kind(self, kind: ErrorKind) -> Result<T, Error> { fn with_kind(self, kind: ErrorKind) -> Result<T, Error> {
@@ -539,6 +552,17 @@ impl<T> ResultExt<T, Error> for Result<T, Error> {
} }
}) })
} }
fn log_err(self) -> Option<T> {
match self {
Ok(a) => Some(a),
Err(e) => {
tracing::error!("{e}");
tracing::debug!("{e:?}");
None
}
}
}
} }
pub trait OptionExt<T> pub trait OptionExt<T>

View File

@@ -14,7 +14,7 @@ keywords = [
name = "start-os" name = "start-os"
readme = "README.md" readme = "README.md"
repository = "https://github.com/Start9Labs/start-os" repository = "https://github.com/Start9Labs/start-os"
version = "0.3.6-alpha.0" version = "0.3.6-alpha.3"
license = "MIT" license = "MIT"
[lib] [lib]
@@ -98,7 +98,12 @@ hex = "0.4.3"
hmac = "0.12.1" hmac = "0.12.1"
http = "1.0.0" http = "1.0.0"
http-body-util = "0.1" http-body-util = "0.1"
hyper-util = { version = "0.1.5", features = ["tokio", "service"] } hyper-util = { version = "0.1.5", features = [
"tokio",
"service",
"http1",
"http2",
] }
id-pool = { version = "0.2.2", default-features = false, features = [ id-pool = { version = "0.2.2", default-features = false, features = [
"serde", "serde",
"u16", "u16",

View File

@@ -58,6 +58,7 @@ pub struct ActionParams {
pub action_id: ActionId, pub action_id: ActionId,
#[command(flatten)] #[command(flatten)]
#[ts(type = "{ [key: string]: any } | null")] #[ts(type = "{ [key: string]: any } | null")]
#[serde(default)]
pub input: StdinDeserializable<Option<Config>>, pub input: StdinDeserializable<Option<Config>>,
} }
// impl C // impl C

View File

@@ -185,6 +185,8 @@ pub struct LoginParams {
#[serde(rename = "__auth_userAgent")] // from Auth middleware #[serde(rename = "__auth_userAgent")] // from Auth middleware
user_agent: Option<String>, user_agent: Option<String>,
#[serde(default)] #[serde(default)]
ephemeral: bool,
#[serde(default)]
#[ts(type = "any")] #[ts(type = "any")]
metadata: Value, metadata: Value,
} }
@@ -195,28 +197,46 @@ pub async fn login_impl(
LoginParams { LoginParams {
password, password,
user_agent, user_agent,
ephemeral,
metadata, metadata,
}: LoginParams, }: LoginParams,
) -> Result<LoginRes, Error> { ) -> Result<LoginRes, Error> {
let password = password.unwrap_or_default().decrypt(&ctx)?; let password = password.unwrap_or_default().decrypt(&ctx)?;
ctx.db if ephemeral {
.mutate(|db| { check_password_against_db(&ctx.db.peek().await, &password)?;
check_password_against_db(db, &password)?; let hash_token = HashSessionToken::new();
let hash_token = HashSessionToken::new(); ctx.ephemeral_sessions.mutate(|s| {
db.as_private_mut().as_sessions_mut().insert( s.0.insert(
hash_token.hashed(), hash_token.hashed().clone(),
&Session { Session {
logged_in: Utc::now(), logged_in: Utc::now(),
last_active: Utc::now(), last_active: Utc::now(),
user_agent, user_agent,
metadata, metadata,
}, },
)?; )
});
Ok(hash_token.to_login_res())
} else {
ctx.db
.mutate(|db| {
check_password_against_db(db, &password)?;
let hash_token = HashSessionToken::new();
db.as_private_mut().as_sessions_mut().insert(
hash_token.hashed(),
&Session {
logged_in: Utc::now(),
last_active: Utc::now(),
user_agent,
metadata,
},
)?;
Ok(hash_token.to_login_res()) Ok(hash_token.to_login_res())
}) })
.await .await
}
} }
#[derive(Deserialize, Serialize, Parser, TS)] #[derive(Deserialize, Serialize, Parser, TS)]
@@ -329,9 +349,15 @@ pub async fn list(
ctx: RpcContext, ctx: RpcContext,
ListParams { session, .. }: ListParams, ListParams { session, .. }: ListParams,
) -> Result<SessionList, Error> { ) -> Result<SessionList, Error> {
let mut sessions = ctx.db.peek().await.into_private().into_sessions().de()?;
ctx.ephemeral_sessions.peek(|s| {
sessions
.0
.extend(s.0.iter().map(|(k, v)| (k.clone(), v.clone())))
});
Ok(SessionList { Ok(SessionList {
current: HashSessionToken::from_token(session).hashed().clone(), current: session,
sessions: ctx.db.peek().await.into_private().into_sessions().de()?, sessions,
}) })
} }

View File

@@ -1,3 +1,4 @@
use std::collections::BTreeSet;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
@@ -178,13 +179,68 @@ pub struct SetParams {
// )] // )]
#[instrument(skip_all)] #[instrument(skip_all)]
pub fn set<C: Context>() -> ParentHandler<C, SetParams, PackageId> { pub fn set<C: Context>() -> ParentHandler<C, SetParams, PackageId> {
ParentHandler::new().root_handler( ParentHandler::new()
from_fn_async(set_impl) .root_handler(
.with_metadata("sync_db", Value::Bool(true)) from_fn_async(set_impl)
.with_inherited(|set_params, id| (id, set_params)) .with_metadata("sync_db", Value::Bool(true))
.no_display() .with_inherited(|set_params, id| (id, set_params))
.with_call_remote::<CliContext>(), .no_display()
) .with_call_remote::<CliContext>(),
)
.subcommand(
"dry",
from_fn_async(set_dry)
.with_inherited(|set_params, id| (id, set_params))
.no_display()
.with_call_remote::<CliContext>(),
)
}
pub async fn set_dry(
ctx: RpcContext,
_: Empty,
(
id,
SetParams {
timeout,
config: StdinDeserializable(config),
},
): (PackageId, SetParams),
) -> Result<BTreeSet<PackageId>, Error> {
let mut breakages = BTreeSet::new();
let procedure_id = Guid::new();
let db = ctx.db.peek().await;
for dep in db
.as_public()
.as_package_data()
.as_entries()?
.into_iter()
.filter_map(
|(k, v)| match v.as_current_dependencies().contains_key(&id) {
Ok(true) => Some(Ok(k)),
Ok(false) => None,
Err(e) => Some(Err(e)),
},
)
{
let dep_id = dep?;
let Some(dependent) = &*ctx.services.get(&dep_id).await else {
continue;
};
if dependent
.dependency_config(procedure_id.clone(), id.clone(), config.clone())
.await?
.is_some()
{
breakages.insert(dep_id);
}
}
Ok(breakages)
} }
#[derive(Default)] #[derive(Default)]

View File

@@ -11,12 +11,13 @@ use josekit::jwk::Jwk;
use reqwest::{Client, Proxy}; use reqwest::{Client, Proxy};
use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{CallRemote, Context, Empty}; use rpc_toolkit::{CallRemote, Context, Empty};
use tokio::sync::{broadcast, Mutex, RwLock}; use tokio::sync::{broadcast, watch, Mutex, RwLock};
use tokio::time::Instant; use tokio::time::Instant;
use tracing::instrument; use tracing::instrument;
use super::setup::CURRENT_SECRET; use super::setup::CURRENT_SECRET;
use crate::account::AccountInfo; use crate::account::AccountInfo;
use crate::auth::Sessions;
use crate::context::config::ServerConfig; use crate::context::config::ServerConfig;
use crate::db::model::Database; use crate::db::model::Database;
use crate::dependencies::compute_dependency_config_errs; use crate::dependencies::compute_dependency_config_errs;
@@ -34,6 +35,7 @@ use crate::service::ServiceMap;
use crate::shutdown::Shutdown; use crate::shutdown::Shutdown;
use crate::system::get_mem_info; use crate::system::get_mem_info;
use crate::util::lshw::{lshw, LshwDevice}; use crate::util::lshw::{lshw, LshwDevice};
use crate::util::sync::SyncMutex;
pub struct RpcContextSeed { pub struct RpcContextSeed {
is_closed: AtomicBool, is_closed: AtomicBool,
@@ -42,7 +44,9 @@ pub struct RpcContextSeed {
pub ethernet_interface: String, pub ethernet_interface: String,
pub datadir: PathBuf, pub datadir: PathBuf,
pub disk_guid: Arc<String>, pub disk_guid: Arc<String>,
pub ephemeral_sessions: SyncMutex<Sessions>,
pub db: TypedPatchDb<Database>, pub db: TypedPatchDb<Database>,
pub sync_db: watch::Sender<u64>,
pub account: RwLock<AccountInfo>, pub account: RwLock<AccountInfo>,
pub net_controller: Arc<NetController>, pub net_controller: Arc<NetController>,
pub s9pk_arch: Option<&'static str>, pub s9pk_arch: Option<&'static str>,
@@ -212,6 +216,8 @@ impl RpcContext {
find_eth_iface().await? find_eth_iface().await?
}, },
disk_guid, disk_guid,
ephemeral_sessions: SyncMutex::new(Sessions::new()),
sync_db: watch::Sender::new(db.sequence().await),
db, db,
account: RwLock::new(account), account: RwLock::new(account),
net_controller, net_controller,
@@ -291,7 +297,9 @@ impl RpcContext {
for (package_id, package) in peek.as_public().as_package_data().as_entries()?.into_iter() { for (package_id, package) in peek.as_public().as_package_data().as_entries()?.into_iter() {
let package = package.clone(); let package = package.clone();
let mut current_dependencies = package.as_current_dependencies().de()?; let mut current_dependencies = package.as_current_dependencies().de()?;
compute_dependency_config_errs(self, &package_id, &mut current_dependencies).await?; compute_dependency_config_errs(self, &package_id, &mut current_dependencies)
.await
.log_err();
updated_current_dependents.insert(package_id.clone(), current_dependencies); updated_current_dependents.insert(package_id.clone(), current_dependencies);
} }
self.db self.db

View File

@@ -100,18 +100,6 @@ impl SetupContext {
.with_ctx(|_| (crate::ErrorKind::Filesystem, db_path.display().to_string()))?; .with_ctx(|_| (crate::ErrorKind::Filesystem, db_path.display().to_string()))?;
Ok(db) Ok(db)
} }
#[instrument(skip_all)]
pub async fn secret_store(&self) -> Result<PgPool, Error> {
init_postgres(&self.datadir).await?;
let secret_store =
PgPool::connect_with(PgConnectOptions::new().database("secrets").username("root"))
.await?;
sqlx::migrate!()
.run(&secret_store)
.await
.with_kind(crate::ErrorKind::Database)?;
Ok(secret_store)
}
pub fn run_setup<F, Fut>(&self, f: F) -> Result<(), Error> pub fn run_setup<F, Fut>(&self, f: F) -> Result<(), Error>
where where

View File

@@ -10,10 +10,12 @@ use clap::Parser;
use imbl_value::InternedString; use imbl_value::InternedString;
use itertools::Itertools; use itertools::Itertools;
use patch_db::json_ptr::{JsonPointer, ROOT}; use patch_db::json_ptr::{JsonPointer, ROOT};
use patch_db::{Dump, Revision}; use patch_db::{DiffPatch, Dump, Revision};
use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler}; use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::sync::mpsc::{self, UnboundedReceiver};
use tokio::sync::watch;
use tracing::instrument; use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
@@ -124,14 +126,56 @@ pub struct SubscribeRes {
pub guid: Guid, pub guid: Guid,
} }
struct DbSubscriber {
rev: u64,
sub: UnboundedReceiver<Revision>,
sync_db: watch::Receiver<u64>,
}
impl DbSubscriber {
async fn recv(&mut self) -> Option<Revision> {
loop {
tokio::select! {
rev = self.sub.recv() => {
if let Some(rev) = rev.as_ref() {
self.rev = rev.id;
}
return rev
}
_ = self.sync_db.changed() => {
let id = *self.sync_db.borrow();
if id > self.rev {
match self.sub.try_recv() {
Ok(rev) => {
self.rev = rev.id;
return Some(rev)
}
Err(mpsc::error::TryRecvError::Disconnected) => {
return None
}
Err(mpsc::error::TryRecvError::Empty) => {
return Some(Revision { id, patch: DiffPatch::default() })
}
}
}
}
}
}
}
}
pub async fn subscribe( pub async fn subscribe(
ctx: RpcContext, ctx: RpcContext,
SubscribeParams { pointer, session }: SubscribeParams, SubscribeParams { pointer, session }: SubscribeParams,
) -> Result<SubscribeRes, Error> { ) -> Result<SubscribeRes, Error> {
let (dump, mut sub) = ctx let (dump, sub) = ctx
.db .db
.dump_and_sub(pointer.unwrap_or_else(|| PUBLIC.clone())) .dump_and_sub(pointer.unwrap_or_else(|| PUBLIC.clone()))
.await; .await;
let mut sub = DbSubscriber {
rev: dump.id,
sub,
sync_db: ctx.sync_db.subscribe(),
};
let guid = Guid::new(); let guid = Guid::new();
ctx.rpc_continuations ctx.rpc_continuations
.add( .add(

View File

@@ -63,6 +63,18 @@ impl PackageState {
)), )),
} }
} }
pub fn expect_removing(&self) -> Result<&InstalledState, Error> {
match self {
Self::Removing(a) => Ok(a),
_ => Err(Error::new(
eyre!(
"Package {} is not in removing state",
self.as_manifest(ManifestPreference::Old).id
),
ErrorKind::InvalidRequest,
)),
}
}
pub fn into_installing_info(self) -> Option<InstallingInfo> { pub fn into_installing_info(self) -> Option<InstallingInfo> {
match self { match self {
Self::Installing(InstallingState { installing_info }) Self::Installing(InstallingState { installing_info })

View File

@@ -9,13 +9,13 @@ use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::instrument; use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use url::Url;
use crate::config::{Config, ConfigSpec, ConfigureContext}; use crate::config::{Config, ConfigSpec, ConfigureContext};
use crate::context::RpcContext; use crate::context::{CliContext, RpcContext};
use crate::db::model::package::CurrentDependencies; use crate::db::model::package::CurrentDependencies;
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::Guid; use crate::rpc_continuations::Guid;
use crate::util::serde::HandlerExtSerde;
use crate::util::PathOrUrl; use crate::util::PathOrUrl;
use crate::Error; use crate::Error;
@@ -65,11 +65,20 @@ pub struct ConfigureParams {
dependency_id: PackageId, dependency_id: PackageId,
} }
pub fn configure<C: Context>() -> ParentHandler<C, ConfigureParams> { pub fn configure<C: Context>() -> ParentHandler<C, ConfigureParams> {
ParentHandler::new().root_handler( ParentHandler::new()
from_fn_async(configure_impl) .root_handler(
.with_inherited(|params, _| params) from_fn_async(configure_impl)
.no_cli(), .with_inherited(|params, _| params)
) .no_display()
.with_call_remote::<CliContext>(),
)
.subcommand(
"dry",
from_fn_async(configure_dry)
.with_inherited(|params, _| params)
.with_display_serializable()
.with_call_remote::<CliContext>(),
)
} }
pub async fn configure_impl( pub async fn configure_impl(
@@ -105,6 +114,17 @@ pub async fn configure_impl(
Ok(()) Ok(())
} }
pub async fn configure_dry(
ctx: RpcContext,
_: Empty,
ConfigureParams {
dependent_id,
dependency_id,
}: ConfigureParams,
) -> Result<ConfigDryRes, Error> {
configure_logic(ctx.clone(), (dependent_id, dependency_id.clone())).await
}
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ConfigDryRes { pub struct ConfigDryRes {

View File

@@ -168,7 +168,7 @@ pub async fn create_all_fs<P: AsRef<Path>>(
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn unmount_fs<P: AsRef<Path>>(guid: &str, datadir: P, name: &str) -> Result<(), Error> { pub async fn unmount_fs<P: AsRef<Path>>(guid: &str, datadir: P, name: &str) -> Result<(), Error> {
unmount(datadir.as_ref().join(name)).await?; unmount(datadir.as_ref().join(name), false).await?;
if !guid.ends_with("_UNENC") { if !guid.ends_with("_UNENC") {
Command::new("cryptsetup") Command::new("cryptsetup")
.arg("-q") .arg("-q")

View File

@@ -74,7 +74,7 @@ impl MountGuard {
} }
pub async fn unmount(mut self, delete_mountpoint: bool) -> Result<(), Error> { pub async fn unmount(mut self, delete_mountpoint: bool) -> Result<(), Error> {
if self.mounted { if self.mounted {
unmount(&self.mountpoint).await?; unmount(&self.mountpoint, false).await?;
if delete_mountpoint { if delete_mountpoint {
match tokio::fs::remove_dir(&self.mountpoint).await { match tokio::fs::remove_dir(&self.mountpoint).await {
Err(e) if e.raw_os_error() == Some(39) => Ok(()), // directory not empty Err(e) if e.raw_os_error() == Some(39) => Ok(()), // directory not empty
@@ -96,7 +96,7 @@ impl Drop for MountGuard {
fn drop(&mut self) { fn drop(&mut self) {
if self.mounted { if self.mounted {
let mountpoint = std::mem::take(&mut self.mountpoint); let mountpoint = std::mem::take(&mut self.mountpoint);
tokio::spawn(async move { unmount(mountpoint).await.unwrap() }); tokio::spawn(async move { unmount(mountpoint, true).await.unwrap() });
} }
} }
} }

View File

@@ -23,7 +23,7 @@ pub async fn bind<P0: AsRef<Path>, P1: AsRef<Path>>(
.status() .status()
.await?; .await?;
if is_mountpoint.success() { if is_mountpoint.success() {
unmount(dst.as_ref()).await?; unmount(dst.as_ref(), true).await?;
} }
tokio::fs::create_dir_all(&src).await?; tokio::fs::create_dir_all(&src).await?;
tokio::fs::create_dir_all(&dst).await?; tokio::fs::create_dir_all(&dst).await?;
@@ -41,11 +41,14 @@ pub async fn bind<P0: AsRef<Path>, P1: AsRef<Path>>(
} }
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn unmount<P: AsRef<Path>>(mountpoint: P) -> Result<(), Error> { pub async fn unmount<P: AsRef<Path>>(mountpoint: P, lazy: bool) -> Result<(), Error> {
tracing::debug!("Unmounting {}.", mountpoint.as_ref().display()); tracing::debug!("Unmounting {}.", mountpoint.as_ref().display());
tokio::process::Command::new("umount") let mut cmd = tokio::process::Command::new("umount");
.arg("-Rl") cmd.arg("-R");
.arg(mountpoint.as_ref()) if lazy {
cmd.arg("-l");
}
cmd.arg(mountpoint.as_ref())
.invoke(crate::ErrorKind::Filesystem) .invoke(crate::ErrorKind::Filesystem)
.await?; .await?;
Ok(()) Ok(())

View File

@@ -13,8 +13,8 @@ use crate::util::Invoke;
use crate::PLATFORM; use crate::PLATFORM;
/// Part of the Firmware, look there for more about /// Part of the Firmware, look there for more about
#[derive(Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "kebab-case")]
pub struct VersionMatcher { pub struct VersionMatcher {
/// Strip this prefix on the version matcher /// Strip this prefix on the version matcher
semver_prefix: Option<String>, semver_prefix: Option<String>,
@@ -27,8 +27,8 @@ pub struct VersionMatcher {
/// Inside a file that is firmware.json, we /// Inside a file that is firmware.json, we
/// wanted a structure that could help decide what to do /// wanted a structure that could help decide what to do
/// for each of the firmware versions /// for each of the firmware versions
#[derive(Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "kebab-case")]
pub struct Firmware { pub struct Firmware {
id: String, id: String,
/// This is the platform(s) the firmware was built for /// This is the platform(s) the firmware was built for
@@ -49,6 +49,7 @@ pub fn display_firmware_update_result(result: RequiresReboot) {
} }
} }
#[instrument]
pub async fn check_for_firmware_update() -> Result<Option<Firmware>, Error> { pub async fn check_for_firmware_update() -> Result<Option<Firmware>, Error> {
let system_product_name = String::from_utf8( let system_product_name = String::from_utf8(
Command::new("dmidecode") Command::new("dmidecode")
@@ -118,6 +119,7 @@ pub async fn check_for_firmware_update() -> Result<Option<Firmware>, Error> {
/// that the firmware was the correct and updated for /// that the firmware was the correct and updated for
/// systems like the Pure System that a new firmware /// systems like the Pure System that a new firmware
/// was released and the updates where pushed through the pure os. /// was released and the updates where pushed through the pure os.
#[instrument]
pub async fn update_firmware(firmware: Firmware) -> Result<(), Error> { pub async fn update_firmware(firmware: Firmware) -> Result<(), Error> {
let id = &firmware.id; let id = &firmware.id;
let firmware_dir = Path::new("/usr/lib/startos/firmware"); let firmware_dir = Path::new("/usr/lib/startos/firmware");

View File

@@ -65,7 +65,7 @@ pub async fn init_postgres(datadir: impl AsRef<Path>) -> Result<(), Error> {
.await? .await?
.success() .success()
{ {
unmount("/var/lib/postgresql").await?; unmount("/var/lib/postgresql", true).await?;
} }
let exists = tokio::fs::metadata(&db_dir).await.is_ok(); let exists = tokio::fs::metadata(&db_dir).await.is_ok();
if !exists { if !exists {
@@ -235,7 +235,7 @@ impl InitPhases {
sync_clock: handle.add_phase("Synchronizing system clock".into(), Some(10)), sync_clock: handle.add_phase("Synchronizing system clock".into(), Some(10)),
enable_zram: handle.add_phase("Enabling ZRAM".into(), Some(1)), enable_zram: handle.add_phase("Enabling ZRAM".into(), Some(1)),
update_server_info: handle.add_phase("Updating server info".into(), Some(1)), update_server_info: handle.add_phase("Updating server info".into(), Some(1)),
launch_service_network: handle.add_phase("Launching service intranet".into(), Some(10)), launch_service_network: handle.add_phase("Launching service intranet".into(), Some(1)),
run_migrations: handle.add_phase("Running migrations".into(), Some(10)), run_migrations: handle.add_phase("Running migrations".into(), Some(10)),
validate_db: handle.add_phase("Validating database".into(), Some(1)), validate_db: handle.add_phase("Validating database".into(), Some(1)),
postinit: if Path::new("/media/startos/config/postinit.sh").exists() { postinit: if Path::new("/media/startos/config/postinit.sh").exists() {
@@ -398,6 +398,20 @@ pub async fn init(
Command::new("update-ca-certificates") Command::new("update-ca-certificates")
.invoke(crate::ErrorKind::OpenSsl) .invoke(crate::ErrorKind::OpenSsl)
.await?; .await?;
if tokio::fs::metadata("/home/kiosk/profile").await.is_ok() {
Command::new("certutil")
.arg("-A")
.arg("-n")
.arg("StartOS Local Root CA")
.arg("-t")
.arg("TCu,Cuw,Tuw")
.arg("-i")
.arg("/usr/local/share/ca-certificates/startos-root-ca.crt")
.arg("-d")
.arg("/home/kiosk/fx-profile")
.invoke(ErrorKind::OpenSsl)
.await?;
}
load_ca_cert.complete(); load_ca_cert.complete();
load_wifi.start(); load_wifi.start();
@@ -422,6 +436,12 @@ pub async fn init(
tokio::fs::remove_dir_all(&tmp_var).await?; tokio::fs::remove_dir_all(&tmp_var).await?;
} }
crate::disk::mount::util::bind(&tmp_var, "/var/tmp", false).await?; crate::disk::mount::util::bind(&tmp_var, "/var/tmp", false).await?;
let downloading = cfg
.datadir()
.join(format!("package-data/archive/downloading"));
if tokio::fs::metadata(&downloading).await.is_ok() {
tokio::fs::remove_dir_all(&downloading).await?;
}
let tmp_docker = cfg let tmp_docker = cfg
.datadir() .datadir()
.join(format!("package-data/tmp/{CONTAINER_TOOL}")); .join(format!("package-data/tmp/{CONTAINER_TOOL}"));

View File

@@ -12,7 +12,7 @@ use itertools::Itertools;
use models::VersionString; use models::VersionString;
use reqwest::header::{HeaderMap, CONTENT_LENGTH}; use reqwest::header::{HeaderMap, CONTENT_LENGTH};
use reqwest::Url; use reqwest::Url;
use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::yajrc::{GenericRpcMethod, RpcError};
use rpc_toolkit::HandlerArgs; use rpc_toolkit::HandlerArgs;
use rustyline_async::ReadlineEvent; use rustyline_async::ReadlineEvent;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@@ -157,7 +157,7 @@ pub async fn install(
.services .services
.install( .install(
ctx.clone(), ctx.clone(),
|| asset.deserialize_s9pk(ctx.client.clone()), || asset.deserialize_s9pk_buffered(ctx.client.clone()),
None::<Never>, None::<Never>,
None, None,
) )
@@ -202,11 +202,12 @@ pub async fn sideload(
use axum::extract::ws::Message; use axum::extract::ws::Message;
async move { async move {
if let Err(e) = async { if let Err(e) = async {
type RpcResponse = rpc_toolkit::yajrc::RpcResponse::<GenericRpcMethod<&'static str, (), FullProgress>>;
tokio::select! { tokio::select! {
res = async { res = async {
while let Some(progress) = progress_listener.next().await { while let Some(progress) = progress_listener.next().await {
ws.send(Message::Text( ws.send(Message::Text(
serde_json::to_string(&Ok::<_, ()>(progress)) serde_json::to_string(&RpcResponse::from_result::<RpcError>(Ok(progress)))
.with_kind(ErrorKind::Serialization)?, .with_kind(ErrorKind::Serialization)?,
)) ))
.await .await
@@ -217,7 +218,7 @@ pub async fn sideload(
err = err_recv => { err = err_recv => {
if let Ok(e) = err { if let Ok(e) = err {
ws.send(Message::Text( ws.send(Message::Text(
serde_json::to_string(&Err::<(), _>(e)) serde_json::to_string(&RpcResponse::from_result::<RpcError>(Err(e)))
.with_kind(ErrorKind::Serialization)?, .with_kind(ErrorKind::Serialization)?,
)) ))
.await .await
@@ -406,14 +407,18 @@ pub async fn cli_install(
let mut progress = FullProgress::new(); let mut progress = FullProgress::new();
type RpcResponse = rpc_toolkit::yajrc::RpcResponse<
GenericRpcMethod<&'static str, (), FullProgress>,
>;
loop { loop {
tokio::select! { tokio::select! {
msg = ws.next() => { msg = ws.next() => {
if let Some(msg) = msg { if let Some(msg) = msg {
if let Message::Text(t) = msg.with_kind(ErrorKind::Network)? { if let Message::Text(t) = msg.with_kind(ErrorKind::Network)? {
progress = progress =
serde_json::from_str::<Result<_, RpcError>>(&t) serde_json::from_str::<RpcResponse>(&t)
.with_kind(ErrorKind::Deserialization)??; .with_kind(ErrorKind::Deserialization)?.result?;
bar.update(&progress); bar.update(&progress);
} }
} else { } else {

View File

@@ -184,7 +184,13 @@ fn deserialize_log_message<'de, D: serde::de::Deserializer<'de>>(
where where
E: serde::de::Error, E: serde::de::Error,
{ {
Ok(v.trim().to_owned()) Ok(v.to_owned())
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: de::Error,
{
Ok(v)
} }
fn visit_unit<E>(self) -> Result<Self::Value, E> fn visit_unit<E>(self) -> Result<Self::Value, E>
where where
@@ -202,7 +208,7 @@ fn deserialize_log_message<'de, D: serde::de::Deserializer<'de>>(
.flatten() .flatten()
.collect::<Result<Vec<u8>, _>>()?, .collect::<Result<Vec<u8>, _>>()?,
) )
.map(|s| s.trim().to_owned()) .map(|s| s.to_owned())
.map_err(serde::de::Error::custom) .map_err(serde::de::Error::custom)
} }
} }

View File

@@ -123,7 +123,11 @@ impl LxcManager {
if !expected.contains(&ContainerId::try_from(container)?) { if !expected.contains(&ContainerId::try_from(container)?) {
let rootfs_path = Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs"); let rootfs_path = Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs");
if tokio::fs::metadata(&rootfs_path).await.is_ok() { if tokio::fs::metadata(&rootfs_path).await.is_ok() {
unmount(Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs")).await?; unmount(
Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs"),
true,
)
.await?;
if tokio_stream::wrappers::ReadDirStream::new( if tokio_stream::wrappers::ReadDirStream::new(
tokio::fs::read_dir(&rootfs_path).await?, tokio::fs::read_dir(&rootfs_path).await?,
) )
@@ -284,6 +288,11 @@ impl LxcContainer {
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn exit(mut self) -> Result<(), Error> { pub async fn exit(mut self) -> Result<(), Error> {
Command::new("lxc-stop")
.arg("--name")
.arg(&**self.guid)
.invoke(ErrorKind::Lxc)
.await?;
self.rpc_bind.take().unmount().await?; self.rpc_bind.take().unmount().await?;
if let Some(log_mount) = self.log_mount.take() { if let Some(log_mount) = self.log_mount.take() {
log_mount.unmount(true).await?; log_mount.unmount(true).await?;

View File

@@ -51,6 +51,11 @@ impl HasLoggedOutSessions {
for sid in &to_log_out { for sid in &to_log_out {
ctx.open_authed_continuations.kill(sid) ctx.open_authed_continuations.kill(sid)
} }
ctx.ephemeral_sessions.mutate(|s| {
for sid in &to_log_out {
s.0.remove(sid);
}
});
ctx.db ctx.db
.mutate(|db| { .mutate(|db| {
let sessions = db.as_private_mut().as_sessions_mut(); let sessions = db.as_private_mut().as_sessions_mut();
@@ -110,20 +115,29 @@ impl HasValidSession {
ctx: &RpcContext, ctx: &RpcContext,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
let session_hash = session_token.hashed(); let session_hash = session_token.hashed();
ctx.db if !ctx.ephemeral_sessions.mutate(|s| {
.mutate(|db| { if let Some(session) = s.0.get_mut(session_hash) {
db.as_private_mut() session.last_active = Utc::now();
.as_sessions_mut() true
.as_idx_mut(session_hash) } else {
.ok_or_else(|| { false
Error::new(eyre!("UNAUTHORIZED"), crate::ErrorKind::Authorization) }
})? }) {
.mutate(|s| { ctx.db
s.last_active = Utc::now(); .mutate(|db| {
Ok(()) db.as_private_mut()
}) .as_sessions_mut()
}) .as_idx_mut(session_hash)
.await?; .ok_or_else(|| {
Error::new(eyre!("UNAUTHORIZED"), crate::ErrorKind::Authorization)
})?
.mutate(|s| {
s.last_active = Utc::now();
Ok(())
})
})
.await?;
}
Ok(Self(SessionType::Session(session_token))) Ok(Self(SessionType::Session(session_token)))
} }

View File

@@ -36,10 +36,11 @@ impl Middleware<RpcContext> for SyncDb {
async fn process_http_response(&mut self, context: &RpcContext, response: &mut Response) { async fn process_http_response(&mut self, context: &RpcContext, response: &mut Response) {
if let Err(e) = async { if let Err(e) = async {
if self.sync_db { if self.sync_db {
response.headers_mut().append( let id = context.db.sequence().await;
"X-Patch-Sequence", response
HeaderValue::from_str(&context.db.sequence().await.to_string())?, .headers_mut()
); .append("X-Patch-Sequence", HeaderValue::from_str(&id.to_string())?);
context.sync_db.send_replace(id);
} }
Ok::<_, InvalidHeaderValue>(()) Ok::<_, InvalidHeaderValue>(())
} }

View File

@@ -29,6 +29,7 @@ pub struct PreInitNetController {
tor: TorController, tor: TorController,
vhost: VHostController, vhost: VHostController,
os_bindings: Vec<Arc<()>>, os_bindings: Vec<Arc<()>>,
server_hostnames: Vec<Option<InternedString>>,
} }
impl PreInitNetController { impl PreInitNetController {
#[instrument(skip_all)] #[instrument(skip_all)]
@@ -44,6 +45,7 @@ impl PreInitNetController {
tor: TorController::new(tor_control, tor_socks), tor: TorController::new(tor_control, tor_socks),
vhost: VHostController::new(db), vhost: VHostController::new(db),
os_bindings: Vec::new(), os_bindings: Vec::new(),
server_hostnames: Vec::new(),
}; };
res.add_os_bindings(hostname, os_tor_key).await?; res.add_os_bindings(hostname, os_tor_key).await?;
Ok(res) Ok(res)
@@ -59,64 +61,26 @@ impl PreInitNetController {
MaybeUtf8String("h2".into()), MaybeUtf8String("h2".into()),
])); ]));
// Internal DNS self.server_hostnames = vec![
self.vhost // LAN IP
.add( None,
Some("embassy".into()), // Internal DNS
443, Some("embassy".into()),
([127, 0, 0, 1], 80).into(), Some("startos".into()),
alpn.clone(), // localhost
) Some("localhost".into()),
.await?; Some(hostname.no_dot_host_name()),
self.vhost // LAN mDNS
.add( Some(hostname.local_domain_name()),
Some("startos".into()), ];
443,
([127, 0, 0, 1], 80).into(),
alpn.clone(),
)
.await?;
// LAN IP for hostname in self.server_hostnames.iter().cloned() {
self.os_bindings.push( self.os_bindings.push(
self.vhost self.vhost
.add(None, 443, ([127, 0, 0, 1], 80).into(), alpn.clone()) .add(hostname, 443, ([127, 0, 0, 1], 80).into(), alpn.clone())
.await?, .await?,
); );
}
// localhost
self.os_bindings.push(
self.vhost
.add(
Some("localhost".into()),
443,
([127, 0, 0, 1], 80).into(),
alpn.clone(),
)
.await?,
);
self.os_bindings.push(
self.vhost
.add(
Some(hostname.no_dot_host_name()),
443,
([127, 0, 0, 1], 80).into(),
alpn.clone(),
)
.await?,
);
// LAN mDNS
self.os_bindings.push(
self.vhost
.add(
Some(hostname.local_domain_name()),
443,
([127, 0, 0, 1], 80).into(),
alpn.clone(),
)
.await?,
);
// Tor // Tor
self.os_bindings.push( self.os_bindings.push(
@@ -154,6 +118,7 @@ pub struct NetController {
pub(super) dns: DnsController, pub(super) dns: DnsController,
pub(super) forward: LanPortForwardController, pub(super) forward: LanPortForwardController,
pub(super) os_bindings: Vec<Arc<()>>, pub(super) os_bindings: Vec<Arc<()>>,
pub(super) server_hostnames: Vec<Option<InternedString>>,
} }
impl NetController { impl NetController {
@@ -163,6 +128,7 @@ impl NetController {
tor, tor,
vhost, vhost,
os_bindings, os_bindings,
server_hostnames,
}: PreInitNetController, }: PreInitNetController,
dns_bind: &[SocketAddr], dns_bind: &[SocketAddr],
) -> Result<Self, Error> { ) -> Result<Self, Error> {
@@ -173,6 +139,7 @@ impl NetController {
dns: DnsController::init(dns_bind).await?, dns: DnsController::init(dns_bind).await?,
forward: LanPortForwardController::new(), forward: LanPortForwardController::new(),
os_bindings, os_bindings,
server_hostnames,
}; };
res.os_bindings res.os_bindings
.push(res.dns.add(None, HOST_IP.into()).await?); .push(res.dns.add(None, HOST_IP.into()).await?);
@@ -258,10 +225,15 @@ impl NetService {
let ctrl = self.net_controller()?; let ctrl = self.net_controller()?;
let mut errors = ErrorCollection::new(); let mut errors = ErrorCollection::new();
for (_, binds) in std::mem::take(&mut self.binds) { for (_, binds) in std::mem::take(&mut self.binds) {
for (_, (lan, _, _, rc)) in binds.lan { for (_, (lan, _, hostnames, rc)) in binds.lan {
drop(rc); drop(rc);
if let Some(external) = lan.assigned_ssl_port { if let Some(external) = lan.assigned_ssl_port {
ctrl.vhost.gc(None, external).await?; for hostname in ctrl.server_hostnames.iter().cloned() {
ctrl.vhost.gc(hostname, external).await?;
}
for hostname in hostnames {
ctrl.vhost.gc(Some(hostname), external).await?;
}
} }
if let Some(external) = lan.assigned_port { if let Some(external) = lan.assigned_port {
ctrl.forward.gc(external).await?; ctrl.forward.gc(external).await?;
@@ -317,11 +289,13 @@ impl NetService {
Err(AlpnInfo::Reflect) Err(AlpnInfo::Reflect)
} }
}; };
rcs.push( for hostname in ctrl.server_hostnames.iter().cloned() {
ctrl.vhost rcs.push(
.add(None, external, target, connect_ssl.clone()) ctrl.vhost
.await?, .add(hostname, external, target, connect_ssl.clone())
); .await?,
);
}
for address in host.addresses() { for address in host.addresses() {
match address { match address {
HostAddress::Onion { address } => { HostAddress::Onion { address } => {
@@ -407,7 +381,9 @@ impl NetService {
} }
if let Some((lan, _, hostnames, _)) = old_lan_bind { if let Some((lan, _, hostnames, _)) = old_lan_bind {
if let Some(external) = lan.assigned_ssl_port { if let Some(external) = lan.assigned_ssl_port {
ctrl.vhost.gc(None, external).await?; for hostname in ctrl.server_hostnames.iter().cloned() {
ctrl.vhost.gc(hostname, external).await?;
}
for hostname in hostnames { for hostname in hostnames {
ctrl.vhost.gc(Some(hostname), external).await?; ctrl.vhost.gc(Some(hostname), external).await?;
} }
@@ -429,7 +405,9 @@ impl NetService {
}); });
for (lan, hostnames) in removed { for (lan, hostnames) in removed {
if let Some(external) = lan.assigned_ssl_port { if let Some(external) = lan.assigned_ssl_port {
ctrl.vhost.gc(None, external).await?; for hostname in ctrl.server_hostnames.iter().cloned() {
ctrl.vhost.gc(hostname, external).await?;
}
for hostname in hostnames { for hostname in hostnames {
ctrl.vhost.gc(Some(hostname), external).await?; ctrl.vhost.gc(Some(hostname), external).await?;
} }
@@ -533,7 +511,9 @@ impl NetService {
pub async fn remove_all(mut self) -> Result<(), Error> { pub async fn remove_all(mut self) -> Result<(), Error> {
self.shutdown = true; self.shutdown = true;
if let Some(ctrl) = Weak::upgrade(&self.controller) { if let Some(ctrl) = Weak::upgrade(&self.controller) {
self.clear_bindings().await self.clear_bindings().await?;
drop(ctrl);
Ok(())
} else { } else {
tracing::warn!("NetService dropped after NetController is shutdown"); tracing::warn!("NetService dropped after NetController is shutdown");
Err(Error::new( Err(Error::new(

View File

@@ -272,9 +272,8 @@ fn s9pk_router(ctx: RpcContext) -> Router {
.route("/installed/:s9pk/*path", { .route("/installed/:s9pk/*path", {
let ctx = ctx.clone(); let ctx = ctx.clone();
any( any(
|x::Path(s9pk): x::Path<String>, |x::Path((s9pk, path)): x::Path<(String, PathBuf)>,
x::Path(path): x::Path<PathBuf>, x::RawQuery(query): x::RawQuery,
x::Query(commitment): x::Query<Option<MerkleArchiveCommitment>>,
request: Request| async move { request: Request| async move {
if_authorized(&ctx, request, |request| async { if_authorized(&ctx, request, |request| async {
let s9pk = S9pk::deserialize( let s9pk = S9pk::deserialize(
@@ -287,7 +286,12 @@ fn s9pk_router(ctx: RpcContext) -> Router {
) )
.await?, .await?,
), ),
commitment.as_ref(), query
.as_deref()
.map(MerkleArchiveCommitment::from_query)
.and_then(|a| a.transpose())
.transpose()?
.as_ref(),
) )
.await?; .await?;
let (parts, _) = request.into_parts(); let (parts, _) = request.into_parts();

View File

@@ -13,6 +13,7 @@ use http::Uri;
use imbl_value::InternedString; use imbl_value::InternedString;
use models::ResultExt; use models::ResultExt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::io::AsyncWriteExt;
use tokio::net::{TcpListener, TcpStream}; use tokio::net::{TcpListener, TcpStream};
use tokio::sync::{Mutex, RwLock}; use tokio::sync::{Mutex, RwLock};
use tokio_rustls::rustls::pki_types::{ use tokio_rustls::rustls::pki_types::{
@@ -27,7 +28,7 @@ use ts_rs::TS;
use crate::db::model::Database; use crate::db::model::Database;
use crate::net::static_server::server_error; use crate::net::static_server::server_error;
use crate::prelude::*; use crate::prelude::*;
use crate::util::io::BackTrackingReader; use crate::util::io::BackTrackingIO;
use crate::util::serde::MaybeUtf8String; use crate::util::serde::MaybeUtf8String;
// not allowed: <=1024, >=32768, 5355, 5432, 9050, 6010, 9051, 5353 // not allowed: <=1024, >=32768, 5355, 5432, 9050, 6010, 9051, 5353
@@ -129,8 +130,7 @@ impl VHostServer {
tracing::debug!("{e:?}"); tracing::debug!("{e:?}");
} }
let mut stream = BackTrackingReader::new(stream); let mut stream = BackTrackingIO::new(stream);
stream.start_buffering();
let mapping = mapping.clone(); let mapping = mapping.clone();
let db = db.clone(); let db = db.clone();
tokio::spawn(async move { tokio::spawn(async move {
@@ -156,6 +156,7 @@ impl VHostServer {
.and_then(|host| host.to_str().ok()); .and_then(|host| host.to_str().ok());
let uri = Uri::from_parts({ let uri = Uri::from_parts({
let mut parts = req.uri().to_owned().into_parts(); let mut parts = req.uri().to_owned().into_parts();
parts.scheme = Some("https".parse()?);
parts.authority = host.map(FromStr::from_str).transpose()?; parts.authority = host.map(FromStr::from_str).transpose()?;
parts parts
})?; })?;
@@ -313,8 +314,12 @@ impl VHostServer {
) )
.await .await
.with_kind(crate::ErrorKind::OpenSsl)?; .with_kind(crate::ErrorKind::OpenSsl)?;
let mut accept = mid.into_stream(Arc::new(cfg));
let io = accept.get_mut().unwrap();
let buffered = io.stop_buffering();
io.write_all(&buffered).await?;
let mut tls_stream = let mut tls_stream =
match mid.into_stream(Arc::new(cfg)).await { match accept.await {
Ok(a) => a, Ok(a) => a,
Err(e) => { Err(e) => {
tracing::trace!( "VHostController: failed to accept TLS connection on port {port}: {e}"); tracing::trace!( "VHostController: failed to accept TLS connection on port {port}: {e}");
@@ -322,7 +327,6 @@ impl VHostServer {
return Ok(()) return Ok(())
} }
}; };
tls_stream.get_mut().0.stop_buffering();
tokio::io::copy_bidirectional( tokio::io::copy_bidirectional(
&mut tls_stream, &mut tls_stream,
&mut target_stream, &mut target_stream,
@@ -335,8 +339,12 @@ impl VHostServer {
{ {
cfg.alpn_protocols.push(proto.into()); cfg.alpn_protocols.push(proto.into());
} }
let mut accept = mid.into_stream(Arc::new(cfg));
let io = accept.get_mut().unwrap();
let buffered = io.stop_buffering();
io.write_all(&buffered).await?;
let mut tls_stream = let mut tls_stream =
match mid.into_stream(Arc::new(cfg)).await { match accept.await {
Ok(a) => a, Ok(a) => a,
Err(e) => { Err(e) => {
tracing::trace!( "VHostController: failed to accept TLS connection on port {port}: {e}"); tracing::trace!( "VHostController: failed to accept TLS connection on port {port}: {e}");
@@ -344,7 +352,6 @@ impl VHostServer {
return Ok(()) return Ok(())
} }
}; };
tls_stream.get_mut().0.stop_buffering();
tokio::io::copy_bidirectional( tokio::io::copy_bidirectional(
&mut tls_stream, &mut tls_stream,
&mut tcp_stream, &mut tcp_stream,
@@ -353,8 +360,12 @@ impl VHostServer {
} }
Err(AlpnInfo::Specified(alpn)) => { Err(AlpnInfo::Specified(alpn)) => {
cfg.alpn_protocols = alpn.into_iter().map(|a| a.0).collect(); cfg.alpn_protocols = alpn.into_iter().map(|a| a.0).collect();
let mut accept = mid.into_stream(Arc::new(cfg));
let io = accept.get_mut().unwrap();
let buffered = io.stop_buffering();
io.write_all(&buffered).await?;
let mut tls_stream = let mut tls_stream =
match mid.into_stream(Arc::new(cfg)).await { match accept.await {
Ok(a) => a, Ok(a) => a,
Err(e) => { Err(e) => {
tracing::trace!( "VHostController: failed to accept TLS connection on port {port}: {e}"); tracing::trace!( "VHostController: failed to accept TLS connection on port {port}: {e}");
@@ -362,7 +373,6 @@ impl VHostServer {
return Ok(()) return Ok(())
} }
}; };
tls_stream.get_mut().0.stop_buffering();
tokio::io::copy_bidirectional( tokio::io::copy_bidirectional(
&mut tls_stream, &mut tls_stream,
&mut tcp_stream, &mut tcp_stream,

View File

@@ -18,7 +18,9 @@ use crate::prelude::*;
lazy_static::lazy_static! { lazy_static::lazy_static! {
static ref SPINNER: ProgressStyle = ProgressStyle::with_template("{spinner} {msg}...").unwrap(); static ref SPINNER: ProgressStyle = ProgressStyle::with_template("{spinner} {msg}...").unwrap();
static ref PERCENTAGE: ProgressStyle = ProgressStyle::with_template("{msg} {percent}% {wide_bar} [{bytes}/{total_bytes}] [{binary_bytes_per_sec} {eta}]").unwrap(); static ref PERCENTAGE: ProgressStyle = ProgressStyle::with_template("{msg} {percent}% {wide_bar} [{human_pos}/{human_len}] [{per_sec} {eta}]").unwrap();
static ref PERCENTAGE_BYTES: ProgressStyle = ProgressStyle::with_template("{msg} {percent}% {wide_bar} [{binary_bytes}/{binary_total_bytes}] [{binary_bytes_per_sec} {eta}]").unwrap();
static ref STEPS: ProgressStyle = ProgressStyle::with_template("{spinner} {wide_msg} [{human_pos}/?] [{per_sec} {elapsed}]").unwrap();
static ref BYTES: ProgressStyle = ProgressStyle::with_template("{spinner} {wide_msg} [{bytes}/?] [{binary_bytes_per_sec} {elapsed}]").unwrap(); static ref BYTES: ProgressStyle = ProgressStyle::with_template("{spinner} {wide_msg} [{bytes}/?] [{binary_bytes_per_sec} {elapsed}]").unwrap();
} }
@@ -38,7 +40,7 @@ impl Progress {
pub fn new() -> Self { pub fn new() -> Self {
Progress::NotStarted(()) Progress::NotStarted(())
} }
pub fn update_bar(self, bar: &ProgressBar) { pub fn update_bar(self, bar: &ProgressBar, bytes: bool) {
match self { match self {
Self::NotStarted(()) => { Self::NotStarted(()) => {
bar.set_style(SPINNER.clone()); bar.set_style(SPINNER.clone());
@@ -51,7 +53,11 @@ impl Progress {
bar.finish(); bar.finish();
} }
Self::Progress { done, total: None } => { Self::Progress { done, total: None } => {
bar.set_style(BYTES.clone()); if bytes {
bar.set_style(BYTES.clone());
} else {
bar.set_style(STEPS.clone());
}
bar.set_position(done); bar.set_position(done);
bar.tick(); bar.tick();
} }
@@ -59,7 +65,11 @@ impl Progress {
done, done,
total: Some(total), total: Some(total),
} => { } => {
bar.set_style(PERCENTAGE.clone()); if bytes {
bar.set_style(PERCENTAGE_BYTES.clone());
} else {
bar.set_style(PERCENTAGE.clone());
}
bar.set_position(done); bar.set_position(done);
bar.set_length(total); bar.set_length(total);
bar.tick(); bar.tick();
@@ -490,7 +500,7 @@ impl PhasedProgressBar {
); );
} }
} }
progress.overall.update_bar(&self.overall); progress.overall.update_bar(&self.overall, false);
for (name, bar) in self.phases.iter() { for (name, bar) in self.phases.iter() {
if let Some(progress) = progress.phases.iter().find_map(|p| { if let Some(progress) = progress.phases.iter().find_map(|p| {
if &p.name == name { if &p.name == name {
@@ -499,7 +509,7 @@ impl PhasedProgressBar {
None None
} }
}) { }) {
progress.update_bar(bar); progress.update_bar(bar, true);
} }
} }
} }

View File

@@ -2,6 +2,7 @@ use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use helpers::NonDetachingJoinHandle;
use reqwest::Client; use reqwest::Client;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite; use tokio::io::AsyncWrite;
@@ -14,8 +15,9 @@ use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey}; use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey};
use crate::registry::signer::AcceptSigners; use crate::registry::signer::AcceptSigners;
use crate::s9pk::merkle_archive::source::http::HttpSource; use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::Section; use crate::s9pk::merkle_archive::source::{ArchiveSource, Section};
use crate::s9pk::S9pk; use crate::s9pk::S9pk;
use crate::upload::UploadingFile;
#[derive(Debug, Deserialize, Serialize, TS)] #[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@@ -70,4 +72,42 @@ impl RegistryAsset<MerkleArchiveCommitment> {
) )
.await .await
} }
pub async fn deserialize_s9pk_buffered(
&self,
client: Client,
) -> Result<S9pk<Section<Arc<BufferedHttpSource>>>, Error> {
S9pk::deserialize(
&Arc::new(BufferedHttpSource::new(client, self.url.clone()).await?),
Some(&self.commitment),
)
.await
}
}
pub struct BufferedHttpSource {
_download: NonDetachingJoinHandle<()>,
file: UploadingFile,
}
impl BufferedHttpSource {
pub async fn new(client: Client, url: Url) -> Result<Self, Error> {
let (mut handle, file) = UploadingFile::new().await?;
let response = client.get(url).send().await?;
Ok(Self {
_download: tokio::spawn(async move { handle.download(response).await }).into(),
file,
})
}
}
impl ArchiveSource for BufferedHttpSource {
type FetchReader = <UploadingFile as ArchiveSource>::FetchReader;
type FetchAllReader = <UploadingFile as ArchiveSource>::FetchAllReader;
async fn size(&self) -> Option<u64> {
self.file.size().await
}
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
self.file.fetch_all().await
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
self.file.fetch(position, size).await
}
} }

View File

@@ -38,7 +38,7 @@ impl ServiceActorSeed {
) )
.await .await
.with_kind(ErrorKind::Dependency) .with_kind(ErrorKind::Dependency)
.map(|res| res.filter(|c| !c.is_empty())) .map(|res| res.filter(|c| !c.is_empty() && Some(c) != remote_config.as_ref()))
} }
} }

View File

@@ -7,14 +7,20 @@ use exver::VersionRange;
use itertools::Itertools; use itertools::Itertools;
use models::{HealthCheckId, PackageId, VolumeId}; use models::{HealthCheckId, PackageId, VolumeId};
use patch_db::json_ptr::JsonPointer; use patch_db::json_ptr::JsonPointer;
use tokio::process::Command;
use crate::db::model::package::{ use crate::db::model::package::{
CurrentDependencies, CurrentDependencyInfo, CurrentDependencyKind, ManifestPreference, CurrentDependencies, CurrentDependencyInfo, CurrentDependencyKind, ManifestPreference,
}; };
use crate::disk::mount::filesystem::bind::Bind;
use crate::disk::mount::filesystem::idmapped::IdMapped;
use crate::disk::mount::filesystem::{FileSystem, MountType};
use crate::rpc_continuations::Guid; use crate::rpc_continuations::Guid;
use crate::service::effects::prelude::*; use crate::service::effects::prelude::*;
use crate::status::health_check::HealthCheckResult; use crate::status::health_check::HealthCheckResult;
use crate::util::clap::FromStrParser; use crate::util::clap::FromStrParser;
use crate::util::Invoke;
use crate::volume::data_dir;
#[derive(Debug, Clone, Serialize, Deserialize, TS)] #[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)] #[ts(export)]
@@ -29,7 +35,7 @@ pub struct MountTarget {
#[ts(export)] #[ts(export)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct MountParams { pub struct MountParams {
location: String, location: PathBuf,
target: MountTarget, target: MountTarget,
} }
pub async fn mount( pub async fn mount(
@@ -45,8 +51,40 @@ pub async fn mount(
}, },
}: MountParams, }: MountParams,
) -> Result<(), Error> { ) -> Result<(), Error> {
// TODO let context = context.deref()?;
todo!() let subpath = subpath.unwrap_or_default();
let subpath = subpath.strip_prefix("/").unwrap_or(&subpath);
let source = data_dir(&context.seed.ctx.datadir, &package_id, &volume_id).join(subpath);
if tokio::fs::metadata(&source).await.is_err() {
tokio::fs::create_dir_all(&source).await?;
}
let location = location.strip_prefix("/").unwrap_or(&location);
let mountpoint = context
.seed
.persistent_container
.lxc_container
.get()
.or_not_found("lxc container")?
.rootfs_dir()
.join(location);
tokio::fs::create_dir_all(&mountpoint).await?;
Command::new("chown")
.arg("100000:100000")
.arg(&mountpoint)
.invoke(crate::ErrorKind::Filesystem)
.await?;
IdMapped::new(Bind::new(source), 0, 100000, 65536)
.mount(
mountpoint,
if readonly {
MountType::ReadOnly
} else {
MountType::ReadWrite
},
)
.await?;
Ok(())
} }
pub async fn get_installed_packages(context: EffectContext) -> Result<Vec<PackageId>, Error> { pub async fn get_installed_packages(context: EffectContext) -> Result<Vec<PackageId>, Error> {

View File

@@ -83,15 +83,16 @@ pub async fn destroy_overlayed_image(
DestroyOverlayedImageParams { guid }: DestroyOverlayedImageParams, DestroyOverlayedImageParams { guid }: DestroyOverlayedImageParams,
) -> Result<(), Error> { ) -> Result<(), Error> {
let context = context.deref()?; let context = context.deref()?;
if context if let Some(overlay) = context
.seed .seed
.persistent_container .persistent_container
.overlays .overlays
.lock() .lock()
.await .await
.remove(&guid) .remove(&guid)
.is_none()
{ {
overlay.unmount(true).await?;
} else {
tracing::warn!("Could not find a guard to remove on the destroy overlayed image; assumming that it already is removed and will be skipping"); tracing::warn!("Could not find a guard to remove on the destroy overlayed image; assumming that it already is removed and will be skipping");
} }
Ok(()) Ok(())

View File

@@ -29,9 +29,8 @@ use crate::s9pk::S9pk;
use crate::service::service_map::InstallProgressHandles; use crate::service::service_map::InstallProgressHandles;
use crate::status::health_check::HealthCheckResult; use crate::status::health_check::HealthCheckResult;
use crate::util::actor::concurrent::ConcurrentActor; use crate::util::actor::concurrent::ConcurrentActor;
use crate::util::actor::Actor;
use crate::util::io::create_file; use crate::util::io::create_file;
use crate::util::serde::Pem; use crate::util::serde::{NoOutput, Pem};
use crate::util::Never; use crate::util::Never;
use crate::volume::data_dir; use crate::volume::data_dir;
@@ -80,7 +79,7 @@ impl ServiceRef {
) -> Result<(), Error> { ) -> Result<(), Error> {
self.seed self.seed
.persistent_container .persistent_container
.execute( .execute::<NoOutput>(
Guid::new(), Guid::new(),
ProcedureName::Uninit, ProcedureName::Uninit,
to_value(&target_version)?, to_value(&target_version)?,
@@ -90,10 +89,60 @@ impl ServiceRef {
let id = self.seed.persistent_container.s9pk.as_manifest().id.clone(); let id = self.seed.persistent_container.s9pk.as_manifest().id.clone();
let ctx = self.seed.ctx.clone(); let ctx = self.seed.ctx.clone();
self.shutdown().await?; self.shutdown().await?;
if target_version.is_none() { if target_version.is_none() {
ctx.db if let Some(pde) = ctx
.mutate(|d| d.as_public_mut().as_package_data_mut().remove(&id)) .db
.await?; .mutate(|d| {
if let Some(pde) = d
.as_public_mut()
.as_package_data_mut()
.remove(&id)?
.map(|d| d.de())
.transpose()?
{
d.as_private_mut().as_available_ports_mut().mutate(|p| {
p.free(
pde.hosts
.0
.values()
.flat_map(|h| h.bindings.values())
.flat_map(|b| {
b.lan
.assigned_port
.into_iter()
.chain(b.lan.assigned_ssl_port)
}),
);
Ok(())
})?;
Ok(Some(pde))
} else {
Ok(None)
}
})
.await?
{
let state = pde.state_info.expect_removing()?;
for volume_id in &state.manifest.volumes {
let path = data_dir(&ctx.datadir, &state.manifest.id, volume_id);
if tokio::fs::metadata(&path).await.is_ok() {
tokio::fs::remove_dir_all(&path).await?;
}
}
let logs_dir = ctx.datadir.join("logs").join(&state.manifest.id);
if tokio::fs::metadata(&logs_dir).await.is_ok() {
tokio::fs::remove_dir_all(&logs_dir).await?;
}
let archive_path = ctx
.datadir
.join("archive")
.join("installed")
.join(&state.manifest.id);
if tokio::fs::metadata(&archive_path).await.is_ok() {
tokio::fs::remove_file(&archive_path).await?;
}
}
} }
Ok(()) Ok(())
} }
@@ -187,10 +236,9 @@ impl Service {
let ctx = ctx.clone(); let ctx = ctx.clone();
move |s9pk: S9pk, i: Model<PackageDataEntry>| async move { move |s9pk: S9pk, i: Model<PackageDataEntry>| async move {
for volume_id in &s9pk.as_manifest().volumes { for volume_id in &s9pk.as_manifest().volumes {
let tmp_path = let path = data_dir(&ctx.datadir, &s9pk.as_manifest().id, volume_id);
data_dir(&ctx.datadir, &s9pk.as_manifest().id.clone(), volume_id); if tokio::fs::metadata(&path).await.is_err() {
if tokio::fs::metadata(&tmp_path).await.is_err() { tokio::fs::create_dir_all(&path).await?;
tokio::fs::create_dir_all(&tmp_path).await?;
} }
} }
let start_stop = if i.as_status().as_main().de()?.running() { let start_stop = if i.as_status().as_main().de()?.running() {
@@ -368,7 +416,7 @@ impl Service {
service service
.seed .seed
.persistent_container .persistent_container
.execute( .execute::<NoOutput>(
Guid::new(), Guid::new(),
ProcedureName::Init, ProcedureName::Init,
to_value(&src_version)?, to_value(&src_version)?,

View File

@@ -43,6 +43,8 @@ const RPC_CONNECT_TIMEOUT: Duration = Duration::from_secs(10);
#[derive(Debug)] #[derive(Debug)]
pub struct ServiceState { pub struct ServiceState {
// indicates whether the service container runtime has been initialized yet
pub(super) rt_initialized: bool,
// This contains the start time and health check information for when the service is running. Note: Will be overwritting to the db, // This contains the start time and health check information for when the service is running. Note: Will be overwritting to the db,
pub(super) running_status: Option<RunningStatus>, pub(super) running_status: Option<RunningStatus>,
// This tracks references to callbacks registered by the running service: // This tracks references to callbacks registered by the running service:
@@ -65,6 +67,7 @@ pub struct ServiceStateKinds {
impl ServiceState { impl ServiceState {
pub fn new(desired_state: StartStop) -> Self { pub fn new(desired_state: StartStop) -> Self {
Self { Self {
rt_initialized: false,
running_status: Default::default(), running_status: Default::default(),
callbacks: Default::default(), callbacks: Default::default(),
temp_desired_state: Default::default(), temp_desired_state: Default::default(),
@@ -167,17 +170,17 @@ impl PersistentContainer {
.arg(&mountpoint) .arg(&mountpoint)
.invoke(crate::ErrorKind::Filesystem) .invoke(crate::ErrorKind::Filesystem)
.await?; .await?;
let s9pk_asset_path = Path::new("assets").join(asset).with_extension("squashfs");
let sqfs = s9pk
.as_archive()
.contents()
.get_path(&s9pk_asset_path)
.and_then(|e| e.as_file())
.or_not_found(s9pk_asset_path.display())?;
assets.insert( assets.insert(
asset.clone(), asset.clone(),
MountGuard::mount( MountGuard::mount(
&Bind::new( &IdMapped::new(LoopDev::from(&**sqfs), 0, 100000, 65536),
asset_dir(
&ctx.datadir,
&s9pk.as_manifest().id,
&s9pk.as_manifest().version,
)
.join(asset),
),
mountpoint, mountpoint,
MountType::ReadWrite, MountType::ReadWrite,
) )
@@ -369,6 +372,8 @@ impl PersistentContainer {
self.rpc_client.request(rpc::Init, Empty {}).await?; self.rpc_client.request(rpc::Init, Empty {}).await?;
self.state.send_modify(|s| s.rt_initialized = true);
Ok(()) Ok(())
} }
@@ -386,39 +391,31 @@ impl PersistentContainer {
let overlays = self.overlays.clone(); let overlays = self.overlays.clone();
let lxc_container = self.lxc_container.take(); let lxc_container = self.lxc_container.take();
self.destroyed = true; self.destroyed = true;
Some( Some(async move {
async move { let mut errs = ErrorCollection::new();
dbg!( if let Some((hdl, shutdown)) = rpc_server {
async move { errs.handle(rpc_client.request(rpc::Exit, Empty {}).await);
let mut errs = ErrorCollection::new(); shutdown.shutdown();
if let Some((hdl, shutdown)) = rpc_server { errs.handle(hdl.await.with_kind(ErrorKind::Cancelled));
errs.handle(rpc_client.request(rpc::Exit, Empty {}).await);
shutdown.shutdown();
errs.handle(hdl.await.with_kind(ErrorKind::Cancelled));
}
for (_, volume) in volumes {
errs.handle(volume.unmount(true).await);
}
for (_, assets) in assets {
errs.handle(assets.unmount(true).await);
}
for (_, overlay) in std::mem::take(&mut *overlays.lock().await) {
errs.handle(overlay.unmount(true).await);
}
for (_, images) in images {
errs.handle(images.unmount().await);
}
errs.handle(js_mount.unmount(true).await);
if let Some(lxc_container) = lxc_container {
errs.handle(lxc_container.exit().await);
}
dbg!(errs.into_result())
}
.await
)
} }
.map(|a| dbg!(a)), for (_, volume) in volumes {
) errs.handle(volume.unmount(true).await);
}
for (_, assets) in assets {
errs.handle(assets.unmount(true).await);
}
for (_, overlay) in std::mem::take(&mut *overlays.lock().await) {
errs.handle(overlay.unmount(true).await);
}
for (_, images) in images {
errs.handle(images.unmount().await);
}
errs.handle(js_mount.unmount(true).await);
if let Some(lxc_container) = lxc_container {
errs.handle(lxc_container.exit().await);
}
errs.into_result()
})
} }
#[instrument(skip_all)] #[instrument(skip_all)]

View File

@@ -2,10 +2,9 @@ use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use imbl::OrdMap; use imbl::OrdMap;
use models::PackageId;
use super::start_stop::StartStop; use super::start_stop::StartStop;
use super::ServiceActorSeed;
use crate::prelude::*; use crate::prelude::*;
use crate::service::transition::TransitionKind; use crate::service::transition::TransitionKind;
use crate::service::SYNC_RETRY_COOLDOWN_SECONDS; use crate::service::SYNC_RETRY_COOLDOWN_SECONDS;
@@ -13,8 +12,6 @@ use crate::status::MainStatus;
use crate::util::actor::background::BackgroundJobQueue; use crate::util::actor::background::BackgroundJobQueue;
use crate::util::actor::Actor; use crate::util::actor::Actor;
use super::ServiceActorSeed;
#[derive(Clone)] #[derive(Clone)]
pub(super) struct ServiceActor(pub(super) Arc<ServiceActorSeed>); pub(super) struct ServiceActor(pub(super) Arc<ServiceActorSeed>);
@@ -26,12 +23,12 @@ enum ServiceActorLoopNext {
impl Actor for ServiceActor { impl Actor for ServiceActor {
fn init(&mut self, jobs: &BackgroundJobQueue) { fn init(&mut self, jobs: &BackgroundJobQueue) {
let seed = self.0.clone(); let seed = self.0.clone();
let mut current = seed.persistent_container.state.subscribe();
jobs.add_job(async move { jobs.add_job(async move {
let id = seed.id.clone(); let _ = current.wait_for(|s| s.rt_initialized).await;
let mut current = seed.persistent_container.state.subscribe();
loop { loop {
match service_actor_loop(&current, &seed, &id).await { match service_actor_loop(&current, &seed).await {
ServiceActorLoopNext::Wait => tokio::select! { ServiceActorLoopNext::Wait => tokio::select! {
_ = current.changed() => (), _ = current.changed() => (),
}, },
@@ -45,8 +42,8 @@ impl Actor for ServiceActor {
async fn service_actor_loop( async fn service_actor_loop(
current: &tokio::sync::watch::Receiver<super::persistent_container::ServiceState>, current: &tokio::sync::watch::Receiver<super::persistent_container::ServiceState>,
seed: &Arc<ServiceActorSeed>, seed: &Arc<ServiceActorSeed>,
id: &PackageId,
) -> ServiceActorLoopNext { ) -> ServiceActorLoopNext {
let id = &seed.id;
let kinds = current.borrow().kinds(); let kinds = current.borrow().kinds();
if let Err(e) = async { if let Err(e) = async {
let main_status = match ( let main_status = match (

View File

@@ -18,6 +18,12 @@ use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use crate::context::{CliContext, RpcContext}; use crate::context::{CliContext, RpcContext};
use crate::disk::mount::filesystem::bind::Bind;
use crate::disk::mount::filesystem::block_dev::BlockDev;
use crate::disk::mount::filesystem::efivarfs::{self, EfiVarFs};
use crate::disk::mount::filesystem::overlayfs::OverlayGuard;
use crate::disk::mount::filesystem::MountType;
use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard};
use crate::notifications::{notify, NotificationLevel}; use crate::notifications::{notify, NotificationLevel};
use crate::prelude::*; use crate::prelude::*;
use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle, PhasedProgressBar}; use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle, PhasedProgressBar};
@@ -247,6 +253,7 @@ async fn maybe_do_update(
asset.validate(SIG_CONTEXT, asset.all_signers())?; asset.validate(SIG_CONTEXT, asset.all_signers())?;
let progress = FullProgressTracker::new(); let progress = FullProgressTracker::new();
let prune_phase = progress.add_phase("Pruning Old OS Images".into(), Some(2));
let mut download_phase = progress.add_phase("Downloading File".into(), Some(100)); let mut download_phase = progress.add_phase("Downloading File".into(), Some(100));
download_phase.set_total(asset.commitment.size); download_phase.set_total(asset.commitment.size);
let reverify_phase = progress.add_phase("Reverifying File".into(), Some(10)); let reverify_phase = progress.add_phase("Reverifying File".into(), Some(10));
@@ -300,6 +307,7 @@ async fn maybe_do_update(
asset, asset,
UpdateProgressHandles { UpdateProgressHandles {
progress, progress,
prune_phase,
download_phase, download_phase,
reverify_phase, reverify_phase,
sync_boot_phase, sync_boot_phase,
@@ -369,6 +377,7 @@ async fn maybe_do_update(
struct UpdateProgressHandles { struct UpdateProgressHandles {
progress: FullProgressTracker, progress: FullProgressTracker,
prune_phase: PhaseProgressTrackerHandle,
download_phase: PhaseProgressTrackerHandle, download_phase: PhaseProgressTrackerHandle,
reverify_phase: PhaseProgressTrackerHandle, reverify_phase: PhaseProgressTrackerHandle,
sync_boot_phase: PhaseProgressTrackerHandle, sync_boot_phase: PhaseProgressTrackerHandle,
@@ -381,12 +390,20 @@ async fn do_update(
asset: RegistryAsset<Blake3Commitment>, asset: RegistryAsset<Blake3Commitment>,
UpdateProgressHandles { UpdateProgressHandles {
progress, progress,
mut prune_phase,
mut download_phase, mut download_phase,
mut reverify_phase, mut reverify_phase,
mut sync_boot_phase, mut sync_boot_phase,
mut finalize_phase, mut finalize_phase,
}: UpdateProgressHandles, }: UpdateProgressHandles,
) -> Result<(), Error> { ) -> Result<(), Error> {
prune_phase.start();
Command::new("/usr/lib/startos/scripts/prune-images")
.arg(asset.commitment.size.to_string())
.invoke(ErrorKind::Filesystem)
.await?;
prune_phase.complete();
download_phase.start(); download_phase.start();
let path = Path::new("/media/startos/images") let path = Path::new("/media/startos/images")
.join(hex::encode(&asset.commitment.hash[..16])) .join(hex::encode(&asset.commitment.hash[..16]))
@@ -420,6 +437,72 @@ async fn do_update(
.arg("boot") .arg("boot")
.invoke(crate::ErrorKind::Filesystem) .invoke(crate::ErrorKind::Filesystem)
.await?; .await?;
if &*PLATFORM != "raspberrypi" {
let mountpoint = "/media/startos/next";
let root_guard = OverlayGuard::mount(
TmpMountGuard::mount(&BlockDev::new(&path), MountType::ReadOnly).await?,
mountpoint,
)
.await?;
let startos = MountGuard::mount(
&Bind::new("/media/startos/root"),
root_guard.path().join("media/startos/root"),
MountType::ReadOnly,
)
.await?;
let boot_guard = MountGuard::mount(
&Bind::new("/boot"),
root_guard.path().join("boot"),
MountType::ReadWrite,
)
.await?;
let dev = MountGuard::mount(
&Bind::new("/dev"),
root_guard.path().join("dev"),
MountType::ReadWrite,
)
.await?;
let proc = MountGuard::mount(
&Bind::new("/proc"),
root_guard.path().join("proc"),
MountType::ReadWrite,
)
.await?;
let sys = MountGuard::mount(
&Bind::new("/sys"),
root_guard.path().join("sys"),
MountType::ReadWrite,
)
.await?;
let efivarfs = if tokio::fs::metadata("/sys/firmware/efi").await.is_ok() {
Some(
MountGuard::mount(
&EfiVarFs,
root_guard.path().join("sys/firmware/efi/efivars"),
MountType::ReadWrite,
)
.await?,
)
} else {
None
};
Command::new("chroot")
.arg(root_guard.path())
.arg("update-grub2")
.invoke(ErrorKind::Grub)
.await?;
if let Some(efivarfs) = efivarfs {
efivarfs.unmount(false).await?;
}
sys.unmount(false).await?;
proc.unmount(false).await?;
dev.unmount(false).await?;
boot_guard.unmount(false).await?;
startos.unmount(false).await?;
root_guard.unmount(false).await?;
}
sync_boot_phase.complete(); sync_boot_phase.complete();
finalize_phase.start(); finalize_phase.start();
@@ -429,6 +512,7 @@ async fn do_update(
.arg("/media/startos/config/current.rootfs") .arg("/media/startos/config/current.rootfs")
.invoke(crate::ErrorKind::Filesystem) .invoke(crate::ErrorKind::Filesystem)
.await?; .await?;
Command::new("sync").invoke(ErrorKind::Filesystem).await?;
finalize_phase.complete(); finalize_phase.complete();
progress.complete(); progress.complete();

View File

@@ -5,10 +5,12 @@ use std::task::Poll;
use std::time::Duration; use std::time::Duration;
use axum::body::Body; use axum::body::Body;
use axum::extract::Request;
use axum::response::Response; use axum::response::Response;
use futures::{ready, FutureExt, StreamExt}; use bytes::Bytes;
use futures::{ready, FutureExt, Stream, StreamExt};
use http::header::CONTENT_LENGTH; use http::header::CONTENT_LENGTH;
use http::StatusCode; use http::{HeaderMap, StatusCode};
use imbl_value::InternedString; use imbl_value::InternedString;
use tokio::fs::File; use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt}; use tokio::io::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt};
@@ -34,51 +36,7 @@ pub async fn upload(
ctx, ctx,
session, session,
|request| async move { |request| async move {
let headers = request.headers(); handle.upload(request).await;
let content_length = match headers.get(CONTENT_LENGTH).map(|a| a.to_str()) {
None => {
return Response::builder()
.status(StatusCode::BAD_REQUEST)
.body(Body::from("Content-Length is required"))
.with_kind(ErrorKind::Network)
}
Some(Err(_)) => {
return Response::builder()
.status(StatusCode::BAD_REQUEST)
.body(Body::from("Invalid Content-Length"))
.with_kind(ErrorKind::Network)
}
Some(Ok(a)) => match a.parse::<u64>() {
Err(_) => {
return Response::builder()
.status(StatusCode::BAD_REQUEST)
.body(Body::from("Invalid Content-Length"))
.with_kind(ErrorKind::Network)
}
Ok(a) => a,
},
};
handle
.progress
.send_modify(|p| p.expected_size = Some(content_length));
let mut body = request.into_body().into_data_stream();
while let Some(next) = body.next().await {
if let Err(e) = async {
handle
.write_all(&next.map_err(|e| {
std::io::Error::new(std::io::ErrorKind::Other, e)
})?)
.await?;
Ok(())
}
.await
{
handle.progress.send_if_modified(|p| p.handle_error(&e));
break;
}
}
Response::builder() Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
@@ -364,6 +322,46 @@ pub struct UploadHandle {
file: File, file: File,
progress: watch::Sender<Progress>, progress: watch::Sender<Progress>,
} }
impl UploadHandle {
pub async fn upload(&mut self, request: Request) {
self.process_headers(request.headers());
self.process_body(request.into_body().into_data_stream())
.await;
}
pub async fn download(&mut self, response: reqwest::Response) {
self.process_headers(response.headers());
self.process_body(response.bytes_stream()).await;
}
fn process_headers(&mut self, headers: &HeaderMap) {
if let Some(content_length) = headers
.get(CONTENT_LENGTH)
.and_then(|a| a.to_str().log_err())
.and_then(|a| a.parse::<u64>().log_err())
{
self.progress
.send_modify(|p| p.expected_size = Some(content_length));
}
}
async fn process_body<E: Into<Box<(dyn std::error::Error + Send + Sync + 'static)>>>(
&mut self,
mut body: impl Stream<Item = Result<Bytes, E>> + Unpin,
) {
while let Some(next) = body.next().await {
if let Err(e) = async {
self.write_all(
&next.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?,
)
.await?;
Ok(())
}
.await
{
self.progress.send_if_modified(|p| p.handle_error(&e));
break;
}
}
}
}
#[pin_project::pinned_drop] #[pin_project::pinned_drop]
impl PinnedDrop for UploadHandle { impl PinnedDrop for UploadHandle {
fn drop(self: Pin<&mut Self>) { fn drop(self: Pin<&mut Self>) {

View File

@@ -411,107 +411,151 @@ impl<T: AsRef<[u8]>> CursorExt for Cursor<T> {
} }
} }
#[pin_project::pin_project]
#[derive(Debug)] #[derive(Debug)]
pub struct BackTrackingReader<T> { enum BTBuffer {
#[pin] NotBuffering,
reader: T, Buffering { read: Vec<u8>, write: Vec<u8> },
buffer: Cursor<Vec<u8>>, Rewound { read: Cursor<Vec<u8>> },
buffering: bool,
} }
impl<T> BackTrackingReader<T> { impl Default for BTBuffer {
pub fn new(reader: T) -> Self { fn default() -> Self {
Self { BTBuffer::NotBuffering
reader,
buffer: Cursor::new(Vec::new()),
buffering: false,
}
}
pub fn start_buffering(&mut self) {
self.buffer.set_position(0);
self.buffer.get_mut().truncate(0);
self.buffering = true;
}
pub fn stop_buffering(&mut self) {
self.buffer.set_position(0);
self.buffer.get_mut().truncate(0);
self.buffering = false;
}
pub fn rewind(&mut self) {
self.buffering = false;
}
pub fn unwrap(self) -> T {
self.reader
} }
} }
impl<T: AsyncRead> AsyncRead for BackTrackingReader<T> { #[pin_project::pin_project]
#[derive(Debug)]
pub struct BackTrackingIO<T> {
#[pin]
io: T,
buffer: BTBuffer,
}
impl<T> BackTrackingIO<T> {
pub fn new(io: T) -> Self {
Self {
io,
buffer: BTBuffer::Buffering {
read: Vec::new(),
write: Vec::new(),
},
}
}
#[must_use]
pub fn stop_buffering(&mut self) -> Vec<u8> {
match std::mem::take(&mut self.buffer) {
BTBuffer::Buffering { write, .. } => write,
BTBuffer::NotBuffering => Vec::new(),
BTBuffer::Rewound { read } => {
self.buffer = BTBuffer::Rewound { read };
Vec::new()
}
}
}
pub fn rewind(&mut self) -> Vec<u8> {
match std::mem::take(&mut self.buffer) {
BTBuffer::Buffering { read, write } => {
self.buffer = BTBuffer::Rewound {
read: Cursor::new(read),
};
write
}
BTBuffer::NotBuffering => Vec::new(),
BTBuffer::Rewound { read } => {
self.buffer = BTBuffer::Rewound { read };
Vec::new()
}
}
}
pub fn unwrap(self) -> T {
self.io
}
}
impl<T: AsyncRead> AsyncRead for BackTrackingIO<T> {
fn poll_read( fn poll_read(
self: std::pin::Pin<&mut Self>, self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>, cx: &mut std::task::Context<'_>,
buf: &mut ReadBuf<'_>, buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> { ) -> Poll<std::io::Result<()>> {
let this = self.project(); let this = self.project();
if *this.buffering { match this.buffer {
let filled = buf.filled().len(); BTBuffer::Buffering { read, .. } => {
let res = this.reader.poll_read(cx, buf); let filled = buf.filled().len();
this.buffer let res = this.io.poll_read(cx, buf);
.get_mut() read.extend_from_slice(&buf.filled()[filled..]);
.extend_from_slice(&buf.filled()[filled..]); res
res
} else {
let mut ready = false;
if (this.buffer.position() as usize) < this.buffer.get_ref().len() {
this.buffer.pure_read(buf);
ready = true;
} }
if buf.remaining() > 0 { BTBuffer::NotBuffering => this.io.poll_read(cx, buf),
match this.reader.poll_read(cx, buf) { BTBuffer::Rewound { read } => {
Poll::Pending => { let mut ready = false;
if ready { if (read.position() as usize) < read.get_ref().len() {
Poll::Ready(Ok(())) read.pure_read(buf);
} else { ready = true;
Poll::Pending }
} if buf.remaining() > 0 {
} match this.io.poll_read(cx, buf) {
a => a, Poll::Pending => {
if ready {
Poll::Ready(Ok(()))
} else {
Poll::Pending
}
}
a => a,
}
} else {
Poll::Ready(Ok(()))
} }
} else {
Poll::Ready(Ok(()))
} }
} }
} }
} }
impl<T: AsyncWrite> AsyncWrite for BackTrackingReader<T> { impl<T: AsyncWrite> AsyncWrite for BackTrackingIO<T> {
fn is_write_vectored(&self) -> bool { fn is_write_vectored(&self) -> bool {
self.reader.is_write_vectored() self.io.is_write_vectored()
} }
fn poll_flush( fn poll_flush(
self: std::pin::Pin<&mut Self>, self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>, cx: &mut std::task::Context<'_>,
) -> Poll<Result<(), std::io::Error>> { ) -> Poll<Result<(), std::io::Error>> {
self.project().reader.poll_flush(cx) self.project().io.poll_flush(cx)
} }
fn poll_shutdown( fn poll_shutdown(
self: std::pin::Pin<&mut Self>, self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>, cx: &mut std::task::Context<'_>,
) -> Poll<Result<(), std::io::Error>> { ) -> Poll<Result<(), std::io::Error>> {
self.project().reader.poll_shutdown(cx) self.project().io.poll_shutdown(cx)
} }
fn poll_write( fn poll_write(
self: std::pin::Pin<&mut Self>, self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>, cx: &mut std::task::Context<'_>,
buf: &[u8], buf: &[u8],
) -> Poll<Result<usize, std::io::Error>> { ) -> Poll<Result<usize, std::io::Error>> {
self.project().reader.poll_write(cx, buf) let this = self.project();
if let BTBuffer::Buffering { write, .. } = this.buffer {
write.extend_from_slice(buf);
Poll::Ready(Ok(buf.len()))
} else {
this.io.poll_write(cx, buf)
}
} }
fn poll_write_vectored( fn poll_write_vectored(
self: std::pin::Pin<&mut Self>, self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>, cx: &mut std::task::Context<'_>,
bufs: &[std::io::IoSlice<'_>], bufs: &[std::io::IoSlice<'_>],
) -> Poll<Result<usize, std::io::Error>> { ) -> Poll<Result<usize, std::io::Error>> {
self.project().reader.poll_write_vectored(cx, bufs) let this = self.project();
if let BTBuffer::Buffering { write, .. } = this.buffer {
let len = bufs.iter().map(|b| b.len()).sum();
write.reserve(len);
for buf in bufs {
write.extend_from_slice(buf);
}
Poll::Ready(Ok(len))
} else {
this.io.poll_write_vectored(cx, bufs)
}
} }
} }

View File

@@ -49,6 +49,7 @@ pub mod net;
pub mod rpc; pub mod rpc;
pub mod rpc_client; pub mod rpc_client;
pub mod serde; pub mod serde;
pub mod sync;
#[derive(Clone, Copy, Debug, ::serde::Deserialize, ::serde::Serialize)] #[derive(Clone, Copy, Debug, ::serde::Deserialize, ::serde::Serialize)]
pub enum Never {} pub enum Never {}

View File

@@ -568,6 +568,14 @@ where
#[derive(Deserialize, Serialize, TS)] #[derive(Deserialize, Serialize, TS)]
pub struct StdinDeserializable<T>(pub T); pub struct StdinDeserializable<T>(pub T);
impl<T> Default for StdinDeserializable<T>
where
T: Default,
{
fn default() -> Self {
Self(T::default())
}
}
impl<T> FromArgMatches for StdinDeserializable<T> impl<T> FromArgMatches for StdinDeserializable<T>
where where
T: DeserializeOwned, T: DeserializeOwned,

View File

@@ -0,0 +1,12 @@
pub struct SyncMutex<T>(std::sync::Mutex<T>);
impl<T> SyncMutex<T> {
pub fn new(t: T) -> Self {
Self(std::sync::Mutex::new(t))
}
pub fn mutate<F: FnOnce(&mut T) -> U, U>(&self, f: F) -> U {
f(&mut *self.0.lock().unwrap())
}
pub fn peek<F: FnOnce(&T) -> U, U>(&self, f: F) -> U {
f(&*self.0.lock().unwrap())
}
}

View File

@@ -14,8 +14,15 @@ mod v0_3_5;
mod v0_3_5_1; mod v0_3_5_1;
mod v0_3_5_2; mod v0_3_5_2;
mod v0_3_6_alpha_0; mod v0_3_6_alpha_0;
mod v0_3_6_alpha_1;
mod v0_3_6_alpha_2;
mod v0_3_6_alpha_3;
mod v0_3_6_alpha_4;
mod v0_3_6_alpha_5;
mod v0_3_6_alpha_6;
mod v0_3_6_alpha_7;
pub type Current = v0_3_6_alpha_0::Version; pub type Current = v0_3_6_alpha_3::Version; // VERSION_BUMP
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
#[serde(untagged)] #[serde(untagged)]
@@ -26,6 +33,13 @@ enum Version {
V0_3_5_1(Wrapper<v0_3_5_1::Version>), V0_3_5_1(Wrapper<v0_3_5_1::Version>),
V0_3_5_2(Wrapper<v0_3_5_2::Version>), V0_3_5_2(Wrapper<v0_3_5_2::Version>),
V0_3_6_alpha_0(Wrapper<v0_3_6_alpha_0::Version>), V0_3_6_alpha_0(Wrapper<v0_3_6_alpha_0::Version>),
V0_3_6_alpha_1(Wrapper<v0_3_6_alpha_1::Version>),
V0_3_6_alpha_2(Wrapper<v0_3_6_alpha_2::Version>),
V0_3_6_alpha_3(Wrapper<v0_3_6_alpha_3::Version>),
V0_3_6_alpha_4(Wrapper<v0_3_6_alpha_4::Version>),
V0_3_6_alpha_5(Wrapper<v0_3_6_alpha_5::Version>),
V0_3_6_alpha_6(Wrapper<v0_3_6_alpha_6::Version>),
V0_3_6_alpha_7(Wrapper<v0_3_6_alpha_7::Version>),
Other(exver::Version), Other(exver::Version),
} }
@@ -46,6 +60,13 @@ impl Version {
Version::V0_3_5_1(Wrapper(x)) => x.semver(), Version::V0_3_5_1(Wrapper(x)) => x.semver(),
Version::V0_3_5_2(Wrapper(x)) => x.semver(), Version::V0_3_5_2(Wrapper(x)) => x.semver(),
Version::V0_3_6_alpha_0(Wrapper(x)) => x.semver(), Version::V0_3_6_alpha_0(Wrapper(x)) => x.semver(),
Version::V0_3_6_alpha_1(Wrapper(x)) => x.semver(),
Version::V0_3_6_alpha_2(Wrapper(x)) => x.semver(),
Version::V0_3_6_alpha_3(Wrapper(x)) => x.semver(),
Version::V0_3_6_alpha_4(Wrapper(x)) => x.semver(),
Version::V0_3_6_alpha_5(Wrapper(x)) => x.semver(),
Version::V0_3_6_alpha_6(Wrapper(x)) => x.semver(),
Version::V0_3_6_alpha_7(Wrapper(x)) => x.semver(),
Version::Other(x) => x.clone(), Version::Other(x) => x.clone(),
} }
} }
@@ -246,6 +267,13 @@ pub async fn init(
Version::V0_3_5_1(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?, Version::V0_3_5_1(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_5_2(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?, Version::V0_3_5_2(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_6_alpha_0(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?, Version::V0_3_6_alpha_0(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_6_alpha_1(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_6_alpha_2(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_6_alpha_3(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_6_alpha_4(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_6_alpha_5(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_6_alpha_6(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::V0_3_6_alpha_7(v) => v.0.migrate_to(&Current::new(), &db, &mut progress).await?,
Version::Other(_) => { Version::Other(_) => {
return Err(Error::new( return Err(Error::new(
eyre!("Cannot downgrade"), eyre!("Cannot downgrade"),
@@ -290,6 +318,15 @@ mod tests {
Just(Version::V0_3_5(Wrapper(v0_3_5::Version::new()))), Just(Version::V0_3_5(Wrapper(v0_3_5::Version::new()))),
Just(Version::V0_3_5_1(Wrapper(v0_3_5_1::Version::new()))), Just(Version::V0_3_5_1(Wrapper(v0_3_5_1::Version::new()))),
Just(Version::V0_3_5_2(Wrapper(v0_3_5_2::Version::new()))), Just(Version::V0_3_5_2(Wrapper(v0_3_5_2::Version::new()))),
Just(Version::V0_3_6_alpha_0(Wrapper(
v0_3_6_alpha_0::Version::new()
))),
Just(Version::V0_3_6_alpha_1(Wrapper(
v0_3_6_alpha_1::Version::new()
))),
Just(Version::V0_3_6_alpha_2(Wrapper(
v0_3_6_alpha_2::Version::new()
))),
em_version().prop_map(Version::Other), em_version().prop_map(Version::Other),
] ]
} }

View File

@@ -0,0 +1,35 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_6_alpha_0, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_3_6_alpha_1: exver::Version = exver::Version::new(
[0, 3, 6],
[PreReleaseSegment::String("alpha".into()), 1.into()]
);
}
#[derive(Clone, Debug)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_3_6_alpha_0::Version;
fn new() -> Self {
Version
}
fn semver(&self) -> exver::Version {
V0_3_6_alpha_1.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
async fn up(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
async fn down(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -0,0 +1,35 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_6_alpha_1, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_3_6_alpha_2: exver::Version = exver::Version::new(
[0, 3, 6],
[PreReleaseSegment::String("alpha".into()), 2.into()]
);
}
#[derive(Clone, Debug)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_3_6_alpha_1::Version;
fn new() -> Self {
Version
}
fn semver(&self) -> exver::Version {
V0_3_6_alpha_2.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
async fn up(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
async fn down(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -0,0 +1,35 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_6_alpha_2, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_3_6_alpha_3: exver::Version = exver::Version::new(
[0, 3, 6],
[PreReleaseSegment::String("alpha".into()), 3.into()]
);
}
#[derive(Clone, Debug)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_3_6_alpha_2::Version;
fn new() -> Self {
Version
}
fn semver(&self) -> exver::Version {
V0_3_6_alpha_3.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
async fn up(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
async fn down(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -0,0 +1,35 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_6_alpha_3, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_3_6_alpha_4: exver::Version = exver::Version::new(
[0, 3, 6],
[PreReleaseSegment::String("alpha".into()), 4.into()]
);
}
#[derive(Clone, Debug)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_3_6_alpha_3::Version;
fn new() -> Self {
Version
}
fn semver(&self) -> exver::Version {
V0_3_6_alpha_4.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
async fn up(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
async fn down(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -0,0 +1,35 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_6_alpha_4, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_3_6_alpha_5: exver::Version = exver::Version::new(
[0, 3, 6],
[PreReleaseSegment::String("alpha".into()), 5.into()]
);
}
#[derive(Clone, Debug)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_3_6_alpha_4::Version;
fn new() -> Self {
Version
}
fn semver(&self) -> exver::Version {
V0_3_6_alpha_5.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
async fn up(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
async fn down(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -0,0 +1,35 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_6_alpha_5, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_3_6_alpha_6: exver::Version = exver::Version::new(
[0, 3, 6],
[PreReleaseSegment::String("alpha".into()), 6.into()]
);
}
#[derive(Clone, Debug)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_3_6_alpha_5::Version;
fn new() -> Self {
Version
}
fn semver(&self) -> exver::Version {
V0_3_6_alpha_6.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
async fn up(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
async fn down(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -0,0 +1,35 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_6_alpha_6, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_3_6_alpha_7: exver::Version = exver::Version::new(
[0, 3, 6],
[PreReleaseSegment::String("alpha".into()), 7.into()]
);
}
#[derive(Clone, Debug)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_3_6_alpha_6::Version;
fn new() -> Self {
Version
}
fn semver(&self) -> exver::Version {
V0_3_6_alpha_7.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
async fn up(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
async fn down(&self, _db: &TypedPatchDb<Database>) -> Result<(), Error> {
Ok(())
}
}

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type ApiState = "error" | "initializing" | "running"

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type EchoParams = { message: string }

View File

@@ -1,4 +1,8 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PasswordType } from "./PasswordType" import type { PasswordType } from "./PasswordType"
export type LoginParams = { password: PasswordType | null; metadata: any } export type LoginParams = {
password: PasswordType | null
ephemeral: boolean
metadata: any
}

View File

@@ -15,6 +15,7 @@ export { AlpnInfo } from "./AlpnInfo"
export { AnySignature } from "./AnySignature" export { AnySignature } from "./AnySignature"
export { AnySigningKey } from "./AnySigningKey" export { AnySigningKey } from "./AnySigningKey"
export { AnyVerifyingKey } from "./AnyVerifyingKey" export { AnyVerifyingKey } from "./AnyVerifyingKey"
export { ApiState } from "./ApiState"
export { AttachParams } from "./AttachParams" export { AttachParams } from "./AttachParams"
export { BackupProgress } from "./BackupProgress" export { BackupProgress } from "./BackupProgress"
export { BackupTargetFS } from "./BackupTargetFS" export { BackupTargetFS } from "./BackupTargetFS"
@@ -42,6 +43,7 @@ export { DepInfo } from "./DepInfo"
export { Description } from "./Description" export { Description } from "./Description"
export { DestroyOverlayedImageParams } from "./DestroyOverlayedImageParams" export { DestroyOverlayedImageParams } from "./DestroyOverlayedImageParams"
export { Duration } from "./Duration" export { Duration } from "./Duration"
export { EchoParams } from "./EchoParams"
export { EncryptedWire } from "./EncryptedWire" export { EncryptedWire } from "./EncryptedWire"
export { ExecuteAction } from "./ExecuteAction" export { ExecuteAction } from "./ExecuteAction"
export { ExportActionParams } from "./ExportActionParams" export { ExportActionParams } from "./ExportActionParams"

View File

@@ -26,6 +26,8 @@ export class Overlay {
shared.push("run") shared.push("run")
} }
fs.copyFile("/etc/resolv.conf", `${rootfs}/etc/resolv.conf`)
for (const dirPart of shared) { for (const dirPart of shared) {
const from = `/${dirPart}` const from = `/${dirPart}`
const to = `${rootfs}/${dirPart}` const to = `${rootfs}/${dirPart}`

View File

@@ -90,6 +90,8 @@ export const addressHostToUrl = (
} else if (host.kind === "ip") { } else if (host.kind === "ip") {
if (host.hostname.kind === "domain") { if (host.hostname.kind === "domain") {
hostname = `${host.hostname.subdomain ? `${host.hostname.subdomain}.` : ""}${host.hostname.domain}` hostname = `${host.hostname.subdomain ? `${host.hostname.subdomain}.` : ""}${host.hostname.domain}`
} else if (host.hostname.kind === "ipv6") {
hostname = `[${host.hostname.value}]`
} else { } else {
hostname = host.hostname.value hostname = host.hostname.value
} }

View File

@@ -1,6 +1,6 @@
{ {
"name": "startos-ui", "name": "startos-ui",
"version": "0.3.6-alpha.0", "version": "0.3.6-alpha.3",
"author": "Start9 Labs, Inc", "author": "Start9 Labs, Inc",
"homepage": "https://start9.com/", "homepage": "https://start9.com/",
"license": "MIT", "license": "MIT",

View File

@@ -20,5 +20,6 @@
}, },
"ackInstructions": {}, "ackInstructions": {},
"theme": "Dark", "theme": "Dark",
"widgets": [] "widgets": [],
"ack-welcome": "0.3.6-alpha.3"
} }

View File

@@ -3,7 +3,6 @@ import { T } from '@start9labs/start-sdk'
export type GetPackageReq = { export type GetPackageReq = {
id: string id: string
version: string | null version: string | null
sourceVersion: null // @TODO what is this?
otherVersions: 'short' otherVersions: 'short'
} }
export type GetPackageRes = T.GetPackageResponse & { export type GetPackageRes = T.GetPackageResponse & {
@@ -13,9 +12,9 @@ export type GetPackageRes = T.GetPackageResponse & {
export type GetPackagesReq = { export type GetPackagesReq = {
id: null id: null
version: null version: null
sourceVersion: null
otherVersions: 'short' otherVersions: 'short'
} }
export type GetPackagesRes = { export type GetPackagesRes = {
[id: T.PackageId]: GetPackageRes [id: T.PackageId]: GetPackageRes
} }

View File

@@ -1,14 +1,16 @@
import { APP_INITIALIZER, Provider } from '@angular/core' import { APP_INITIALIZER, inject, Provider } from '@angular/core'
import { UntypedFormBuilder } from '@angular/forms' import { UntypedFormBuilder } from '@angular/forms'
import { Router, RouteReuseStrategy } from '@angular/router' import { Router, RouteReuseStrategy } from '@angular/router'
import { IonicRouteStrategy, IonNav } from '@ionic/angular' import { IonicRouteStrategy, IonNav } from '@ionic/angular'
import { RELATIVE_URL, THEME, WorkspaceConfig } from '@start9labs/shared' import { RELATIVE_URL, THEME, WorkspaceConfig } from '@start9labs/shared'
import { TUI_ICONS_PATH } from '@taiga-ui/core' import { TUI_DIALOGS_CLOSE, TUI_ICONS_PATH } from '@taiga-ui/core'
import { PatchDB } from 'patch-db-client' import { PatchDB } from 'patch-db-client'
import { filter, pairwise } from 'rxjs'
import { import {
PATCH_CACHE, PATCH_CACHE,
PatchDbSource, PatchDbSource,
} from 'src/app/services/patch-db/patch-db-source' } from 'src/app/services/patch-db/patch-db-source'
import { StateService } from 'src/app/services/state.service'
import { ApiService } from './services/api/embassy-api.service' import { ApiService } from './services/api/embassy-api.service'
import { MockApiService } from './services/api/embassy-mock-api.service' import { MockApiService } from './services/api/embassy-mock-api.service'
import { LiveApiService } from './services/api/embassy-live-api.service' import { LiveApiService } from './services/api/embassy-live-api.service'
@@ -58,6 +60,17 @@ export const APP_PROVIDERS: Provider[] = [
provide: TUI_ICONS_PATH, provide: TUI_ICONS_PATH,
useValue: (name: string) => `/assets/taiga-ui/icons/${name}.svg#${name}`, useValue: (name: string) => `/assets/taiga-ui/icons/${name}.svg#${name}`,
}, },
{
provide: TUI_DIALOGS_CLOSE,
useFactory: () =>
inject(StateService).pipe(
pairwise(),
filter(
([prev, curr]) =>
prev === 'running' && (curr === 'error' || curr === 'initializing'),
),
),
},
] ]
export function appInitializer( export function appInitializer(

View File

@@ -1,7 +1,6 @@
<ng-container [ngSwitch]="spec.type"> <ng-container [ngSwitch]="spec.type">
<form-color *ngSwitchCase="'color'"></form-color> <form-color *ngSwitchCase="'color'"></form-color>
<form-datetime *ngSwitchCase="'datetime'"></form-datetime> <form-datetime *ngSwitchCase="'datetime'"></form-datetime>
<form-file *ngSwitchCase="'file'"></form-file>
<form-multiselect *ngSwitchCase="'multiselect'"></form-multiselect> <form-multiselect *ngSwitchCase="'multiselect'"></form-multiselect>
<form-number *ngSwitchCase="'number'"></form-number> <form-number *ngSwitchCase="'number'"></form-number>
<form-select *ngSwitchCase="'select'"></form-select> <form-select *ngSwitchCase="'select'"></form-select>

View File

@@ -1,31 +0,0 @@
<tui-input-files
[pseudoInvalid]="invalid"
[(ngModel)]="value"
(focusedChange)="onFocus($event)"
>
<input tuiInputFiles [accept]="spec.extensions.join(',')" />
<ng-template let-drop>
<div class="template" [class.template_hidden]="drop">
<div class="label">
{{ spec.name }}
<span *ngIf="spec.required">*</span>
<tui-tooltip
*ngIf="spec.description"
[content]="spec.description"
></tui-tooltip>
</div>
<tui-tag
*ngIf="value; else label"
class="file"
size="l"
[value]="value.name"
[removable]="true"
(edited)="value = null"
></tui-tag>
<ng-template #label>
<small>Click or drop file here</small>
</ng-template>
</div>
<div class="drop" [class.drop_hidden]="!drop">Drop file here</div>
</ng-template>
</tui-input-files>

View File

@@ -1,46 +0,0 @@
@import '@taiga-ui/core/styles/taiga-ui-local';
.template {
@include transition(opacity);
width: 100%;
display: flex;
align-items: center;
padding: 0 0.5rem;
font: var(--tui-font-text-m);
font-weight: bold;
&_hidden {
opacity: 0;
}
}
.drop {
@include fullsize();
@include transition(opacity);
display: flex;
align-items: center;
justify-content: space-around;
&_hidden {
opacity: 0;
}
}
.label {
display: flex;
align-items: center;
max-width: 50%;
}
small {
max-width: 50%;
font-weight: normal;
color: var(--tui-text-02);
margin-left: auto;
}
tui-tag {
z-index: 1;
margin: -0.25rem -0.25rem -0.25rem auto;
}

View File

@@ -1,11 +0,0 @@
import { Component } from '@angular/core'
import { TuiFileLike } from '@taiga-ui/kit'
import { CT } from '@start9labs/start-sdk'
import { Control } from '../control'
@Component({
selector: 'form-file',
templateUrl: './form-file.component.html',
styleUrls: ['./form-file.component.scss'],
})
export class FormFileComponent extends Control<CT.ValueSpecFile, TuiFileLike> {}

View File

@@ -40,7 +40,6 @@ import { FormToggleComponent } from './form-toggle/form-toggle.component'
import { FormTextareaComponent } from './form-textarea/form-textarea.component' import { FormTextareaComponent } from './form-textarea/form-textarea.component'
import { FormNumberComponent } from './form-number/form-number.component' import { FormNumberComponent } from './form-number/form-number.component'
import { FormSelectComponent } from './form-select/form-select.component' import { FormSelectComponent } from './form-select/form-select.component'
import { FormFileComponent } from './form-file/form-file.component'
import { FormMultiselectComponent } from './form-multiselect/form-multiselect.component' import { FormMultiselectComponent } from './form-multiselect/form-multiselect.component'
import { FormUnionComponent } from './form-union/form-union.component' import { FormUnionComponent } from './form-union/form-union.component'
import { FormObjectComponent } from './form-object/form-object.component' import { FormObjectComponent } from './form-object/form-object.component'
@@ -96,7 +95,6 @@ import { HintPipe } from './hint.pipe'
FormNumberComponent, FormNumberComponent,
FormSelectComponent, FormSelectComponent,
FormMultiselectComponent, FormMultiselectComponent,
FormFileComponent,
FormUnionComponent, FormUnionComponent,
FormObjectComponent, FormObjectComponent,
FormArrayComponent, FormArrayComponent,

View File

@@ -6,7 +6,7 @@ import {
isEmptyObject, isEmptyObject,
LoadingService, LoadingService,
} from '@start9labs/shared' } from '@start9labs/shared'
import { CT } from '@start9labs/start-sdk' import { CT, T } from '@start9labs/start-sdk'
import { TuiButtonModule } from '@taiga-ui/experimental' import { TuiButtonModule } from '@taiga-ui/experimental'
import { import {
TuiDialogContext, TuiDialogContext,
@@ -203,8 +203,6 @@ export class ConfigModal {
const loader = new Subscription() const loader = new Subscription()
try { try {
await this.uploadFiles(config, loader)
if (hasCurrentDeps(this.pkgId, await getAllPackages(this.patchDb))) { if (hasCurrentDeps(this.pkgId, await getAllPackages(this.patchDb))) {
await this.configureDeps(config, loader) await this.configureDeps(config, loader)
} else { } else {
@@ -217,24 +215,6 @@ export class ConfigModal {
} }
} }
private async uploadFiles(config: Record<string, any>, loader: Subscription) {
loader.unsubscribe()
loader.closed = false
// TODO: Could be nested files
const keys = Object.keys(config).filter(key => config[key] instanceof File)
const message = `Uploading File${keys.length > 1 ? 's' : ''}...`
if (!keys.length) return
loader.add(this.loader.open(message).subscribe())
const hashes = await Promise.all(
keys.map(key => this.embassyApi.uploadFile(config[key])),
)
keys.forEach((key, i) => (config[key] = hashes[i]))
}
private async configureDeps( private async configureDeps(
config: Record<string, any>, config: Record<string, any>,
loader: Subscription, loader: Subscription,
@@ -265,11 +245,11 @@ export class ConfigModal {
this.context.$implicit.complete() this.context.$implicit.complete()
} }
private async approveBreakages(breakages: Breakages): Promise<boolean> { private async approveBreakages(breakages: T.PackageId[]): Promise<boolean> {
const packages = await getAllPackages(this.patchDb) const packages = await getAllPackages(this.patchDb)
const message = const message =
'As a result of this change, the following services will no longer work properly and may crash:<ul>' 'As a result of this change, the following services will no longer work properly and may crash:<ul>'
const content = `${message}${Object.keys(breakages).map( const content = `${message}${breakages.map(
id => `<li><b>${getManifest(packages[id]).title}</b></li>`, id => `<li><b>${getManifest(packages[id]).title}</b></li>`,
)}</ul>` )}</ul>`
const data: TuiPromptData = { content, yes: 'Continue', no: 'Cancel' } const data: TuiPromptData = { content, yes: 'Continue', no: 'Cancel' }

View File

@@ -12,7 +12,7 @@
<ion-content class="ion-padding"> <ion-content class="ion-padding">
<h2>This Release</h2> <h2>This Release</h2>
<h4>0.3.6-alpha.0</h4> <h4>0.3.6-alpha.3</h4>
<h6>This is an ALPHA release! DO NOT use for production data!</h6> <h6>This is an ALPHA release! DO NOT use for production data!</h6>
<h6>Expect that any data you create or store on this version of the OS can be LOST FOREVER!</h6> <h6>Expect that any data you create or store on this version of the OS can be LOST FOREVER!</h6>

View File

@@ -20,6 +20,20 @@ import {
import { getAllPackages, getManifest } from 'src/app/util/get-package-data' import { getAllPackages, getManifest } from 'src/app/util/get-package-data'
import { hasCurrentDeps } from 'src/app/util/has-deps' import { hasCurrentDeps } from 'src/app/util/has-deps'
const allowedStatuses = {
onlyRunning: new Set(['running']),
onlyStopped: new Set(['stopped']),
any: new Set([
'running',
'stopped',
'restarting',
'restoring',
'stopping',
'starting',
'backingUp',
]),
}
@Component({ @Component({
selector: 'app-actions', selector: 'app-actions',
templateUrl: './app-actions.page.html', templateUrl: './app-actions.page.html',
@@ -46,7 +60,10 @@ export class AppActionsPage {
status: T.Status, status: T.Status,
action: { key: string; value: T.ActionMetadata }, action: { key: string; value: T.ActionMetadata },
) { ) {
if (status && action.value.allowedStatuses.includes(status.main.status)) { if (
status &&
allowedStatuses[action.value.allowedStatuses].has(status.main.status)
) {
if (!isEmptyObject(action.value.input || {})) { if (!isEmptyObject(action.value.input || {})) {
this.formDialog.open(FormComponent, { this.formDialog.open(FormComponent, {
label: action.value.name, label: action.value.name,
@@ -84,7 +101,7 @@ export class AppActionsPage {
await alert.present() await alert.present()
} }
} else { } else {
const statuses = [...action.value.allowedStatuses] const statuses = [...allowedStatuses[action.value.allowedStatuses]]
const last = statuses.pop() const last = statuses.pop()
let statusesStr = statuses.join(', ') let statusesStr = statuses.join(', ')
let error = '' let error = ''

View File

@@ -25,7 +25,7 @@
slot="end" slot="end"
fill="clear" fill="clear"
color="primary" color="primary"
(click)="launchUi($event, pkg.entry.serviceInterfaces)" (click)="launchUi($event, pkg.entry.serviceInterfaces, pkg.entry.hosts)"
[disabled]=" [disabled]="
!(pkg.entry.stateInfo.state | isLaunchable : pkgMainStatus.status) !(pkg.entry.stateInfo.state | isLaunchable : pkgMainStatus.status)
" "

View File

@@ -27,9 +27,13 @@ export class AppListPkgComponent {
return this.pkgMainStatus.status === 'stopping' ? '30s' : null // @dr-bonez TODO return this.pkgMainStatus.status === 'stopping' ? '30s' : null // @dr-bonez TODO
} }
launchUi(e: Event, interfaces: PackageDataEntry['serviceInterfaces']): void { launchUi(
e: Event,
interfaces: PackageDataEntry['serviceInterfaces'],
hosts: PackageDataEntry['hosts'],
): void {
e.stopPropagation() e.stopPropagation()
e.preventDefault() e.preventDefault()
this.launcherService.launch(interfaces) this.launcherService.launch(interfaces, hosts)
} }
} }

View File

@@ -56,13 +56,13 @@
</ion-button> </ion-button>
<ion-button <ion-button
*ngIf="pkgStatus && interfaces && (interfaces | hasUi)" *ngIf="pkgStatus && interfaces && (interfaces | hasUi) && hosts"
class="action-button" class="action-button"
color="primary" color="primary"
[disabled]=" [disabled]="
!(pkg.stateInfo.state | isLaunchable: pkgStatus.main.status) !(pkg.stateInfo.state | isLaunchable: pkgStatus.main.status)
" "
(click)="launchUi(interfaces)" (click)="launchUi(interfaces, hosts)"
> >
<ion-icon slot="start" name="open-outline"></ion-icon> <ion-icon slot="start" name="open-outline"></ion-icon>
Launch UI Launch UI

View File

@@ -55,6 +55,10 @@ export class AppShowStatusComponent {
return this.pkg.serviceInterfaces return this.pkg.serviceInterfaces
} }
get hosts(): PackageDataEntry['hosts'] {
return this.pkg.hosts
}
get pkgStatus(): T.Status { get pkgStatus(): T.Status {
return this.pkg.status return this.pkg.status
} }
@@ -79,8 +83,11 @@ export class AppShowStatusComponent {
return this.pkgStatus?.main.status === 'stopping' ? '30s' : null // @dr-bonez TODO return this.pkgStatus?.main.status === 'stopping' ? '30s' : null // @dr-bonez TODO
} }
launchUi(interfaces: PackageDataEntry['serviceInterfaces']): void { launchUi(
this.launcherService.launch(interfaces) interfaces: PackageDataEntry['serviceInterfaces'],
hosts: PackageDataEntry['hosts'],
): void {
this.launcherService.launch(interfaces, hosts)
} }
async presentModalConfig(): Promise<void> { async presentModalConfig(): Promise<void> {

View File

@@ -58,9 +58,7 @@ export class InitService extends Observable<MappedProgress> {
} }
}), }),
catchError(e => { catchError(e => {
// @TODO Alex this toast is presenting when we navigate away from init page. It seems other websockets exhibit the same behavior, but we never noticed because the error were not being caught and presented in this manner. It seems odd that unsubscribing from a websocket subject would be treated as an error. console.error(e)
// this.errorService.handleError(e)
return EMPTY return EMPTY
}), }),
) )

View File

@@ -40,6 +40,7 @@ export class LoginPage {
await this.api.login({ await this.api.login({
password: this.password, password: this.password,
metadata: { platforms: getPlatforms() }, metadata: { platforms: getPlatforms() },
ephemeral: window.location.host === 'localhost',
}) })
this.password = '' this.password = ''

View File

@@ -76,7 +76,7 @@ const routes: Routes = [
import('./ssh-keys/ssh-keys.module').then(m => m.SSHKeysPageModule), import('./ssh-keys/ssh-keys.module').then(m => m.SSHKeysPageModule),
}, },
{ {
path: 'wifi', path: 'wireless',
loadChildren: () => loadChildren: () =>
import('./wifi/wifi.module').then(m => m.WifiPageModule), import('./wifi/wifi.module').then(m => m.WifiPageModule),
}, },

View File

@@ -50,8 +50,7 @@
<ion-item <ion-item
*ngFor="let button of cat.value" *ngFor="let button of cat.value"
button button
[style.display]="((button.title === 'Repair Disk' && !(showDiskRepair$ | async))) || (button.title === 'WiFi' && !(wifiConnected$ | async)) ? 'none' : 'block'" [style.display]="((button.title === 'Repair Disk' && !(showDiskRepair$ | async))) ? 'none' : 'block'"
[color]="button.title === 'WiFi' ? 'warning' : ''"
[detail]="button.detail" [detail]="button.detail"
[disabled]="button.disabled$ | async" [disabled]="button.disabled$ | async"
(click)="button.action()" (click)="button.action()"

View File

@@ -40,7 +40,6 @@ export class ServerShowPage {
readonly server$ = this.patch.watch$('serverInfo') readonly server$ = this.patch.watch$('serverInfo')
readonly showUpdate$ = this.eosService.showUpdate$ readonly showUpdate$ = this.eosService.showUpdate$
readonly showDiskRepair$ = this.ClientStorageService.showDiskRepair$ readonly showDiskRepair$ = this.ClientStorageService.showDiskRepair$
readonly wifiConnected$ = this.patch.watch$('serverInfo', 'wifi', 'selected')
constructor( constructor(
private readonly alertCtrl: AlertController, private readonly alertCtrl: AlertController,
@@ -475,11 +474,14 @@ export class ServerShowPage {
disabled$: of(false), disabled$: of(false),
}, },
{ {
title: 'WiFi', title: 'Wireless',
description: 'WiFi is deprecated. Click to learn more.', description:
'Connect your server to WiFi instead of Ethernet (not recommended)',
icon: 'wifi', icon: 'wifi',
action: () => action: () =>
this.navCtrl.navigateForward(['wifi'], { relativeTo: this.route }), this.navCtrl.navigateForward(['wireless'], {
relativeTo: this.route,
}),
detail: true, detail: true,
disabled$: of(false), disabled$: of(false),
}, },

View File

@@ -52,8 +52,15 @@
></ion-icon> ></ion-icon>
<ion-label> <ion-label>
<h1>{{ getPlatformName(currentSession.metadata.platforms) }}</h1> <h1>{{ getPlatformName(currentSession.metadata.platforms) }}</h1>
<h2>Last Active: {{ currentSession.lastActive| date : 'medium' }}</h2> <h2 *ngIf="currentSession.userAgent as agent">{{ agent }}</h2>
<p>{{ currentSession.userAgent }}</p> <p>
<b>First Seen</b>
: {{ currentSession.loggedIn| date : 'medium' }}
</p>
<p>
<b>Last Active</b>
: {{ currentSession.lastActive| date : 'medium' }}
</p>
</ion-label> </ion-label>
</ion-item> </ion-item>
@@ -78,8 +85,15 @@
></ion-icon> ></ion-icon>
<ion-label> <ion-label>
<h1>{{ getPlatformName(session.metadata.platforms) }}</h1> <h1>{{ getPlatformName(session.metadata.platforms) }}</h1>
<h2>Last Active: {{ session.lastActive | date : 'medium' }}</h2> <h2 *ngIf="currentSession.userAgent as agent">{{ agent }}</h2>
<p>{{ session.userAgent }}</p> <p>
<b>First Seen</b>
: {{ currentSession.loggedIn| date : 'medium' }}
</p>
<p>
<b>Last Active</b>
: {{ currentSession.lastActive| date : 'medium' }}
</p>
</ion-label> </ion-label>
<ion-button <ion-button
slot="end" slot="end"

View File

@@ -27,17 +27,11 @@ export class SessionsPage {
this.currentSession = sessionInfo.sessions[sessionInfo.current] this.currentSession = sessionInfo.sessions[sessionInfo.current]
delete sessionInfo.sessions[sessionInfo.current] delete sessionInfo.sessions[sessionInfo.current]
this.otherSessions = Object.entries(sessionInfo.sessions) this.otherSessions = Object.entries(sessionInfo.sessions)
.map(([id, session]) => { .map(([id, session]) => ({ id, ...session }))
return { .sort(
id, (a, b) =>
...session, new Date(b.lastActive).valueOf() - new Date(a.lastActive).valueOf(),
} )
})
.sort((a, b) => {
return (
new Date(b.lastActive).valueOf() - new Date(a.lastActive).valueOf()
)
})
} catch (e: any) { } catch (e: any) {
this.errorService.handleError(e) this.errorService.handleError(e)
} finally { } finally {
@@ -108,10 +102,6 @@ export class SessionsPage {
return 'Unknown Device' return 'Unknown Device'
} }
} }
asIsOrder(a: any, b: any) {
return 0
}
} }
interface SessionWithId extends Session { interface SessionWithId extends Session {

View File

@@ -138,8 +138,8 @@ export class SideloadPage {
).getUint32(0, false) ).getUint32(0, false)
await getPositions(start, end, file, positions, tocLength as any) await getPositions(start, end, file, positions, tocLength as any)
await this.getManifest(positions, file) await this.getManifestV1(positions, file)
await this.getIcon(positions, file) await this.getIconV1(positions, file)
} }
async parseS9pkV2(file: File) { async parseS9pkV2(file: File) {
@@ -148,7 +148,7 @@ export class SideloadPage {
this.toUpload.icon = await s9pk.icon() this.toUpload.icon = await s9pk.icon()
} }
async getManifest(positions: Positions, file: Blob) { private async getManifestV1(positions: Positions, file: Blob) {
const data = await blobToBuffer( const data = await blobToBuffer(
file.slice( file.slice(
Number(positions['manifest'][0]), Number(positions['manifest'][0]),
@@ -158,12 +158,11 @@ export class SideloadPage {
this.toUpload.manifest = await cbor.decode(data, true) this.toUpload.manifest = await cbor.decode(data, true)
} }
async getIcon(positions: Positions, file: Blob) { private async getIconV1(positions: Positions, file: Blob) {
const contentType = '' // @TODO
const data = file.slice( const data = file.slice(
Number(positions['icon'][0]), Number(positions['icon'][0]),
Number(positions['icon'][0]) + Number(positions['icon'][1]), Number(positions['icon'][0]) + Number(positions['icon'][1]),
contentType, '',
) )
this.toUpload.icon = await blobToDataURL(data) this.toUpload.icon = await blobToDataURL(data)
} }

Some files were not shown because too many files have changed in this diff Show More