Feature/UI sideload (#2658)

* ui sideloading

* remove subtlecrypto import

* fix parser

* misc fixes

* allow docker pull during compat conversion
This commit is contained in:
Aiden McClelland
2024-06-28 15:03:01 -06:00
committed by GitHub
parent c16d8a1da1
commit 822dd5e100
101 changed files with 1901 additions and 797 deletions

View File

@@ -17,7 +17,7 @@ COMPAT_SRC := $(shell git ls-files system-images/compat/)
UTILS_SRC := $(shell git ls-files system-images/utils/)
BINFMT_SRC := $(shell git ls-files system-images/binfmt/)
CORE_SRC := $(shell git ls-files core) $(shell git ls-files --recurse-submodules patch-db) web/dist/static web/patchdb-ui-seed.json $(GIT_HASH_FILE)
WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist web/patchdb-ui-seed.json
WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist web/patchdb-ui-seed.json sdk/dist
WEB_UI_SRC := $(shell git ls-files web/projects/ui)
WEB_SETUP_WIZARD_SRC := $(shell git ls-files web/projects/setup-wizard)
WEB_INSTALL_WIZARD_SRC := $(shell git ls-files web/projects/install-wizard)
@@ -262,15 +262,19 @@ web/node_modules/.package-lock.json: web/package.json sdk/dist
npm --prefix web ci
touch web/node_modules/.package-lock.json
web/dist/raw/ui: $(WEB_UI_SRC) $(WEB_SHARED_SRC)
web/.angular: patch-db/client/dist sdk/dist web/node_modules/.package-lock.json
rm -rf web/.angular
mkdir -p web/.angular
web/dist/raw/ui: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular
npm --prefix web run build:ui
touch web/dist/raw/ui
web/dist/raw/setup-wizard: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC)
web/dist/raw/setup-wizard: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular
npm --prefix web run build:setup
touch web/dist/raw/setup-wizard
web/dist/raw/install-wizard: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC)
web/dist/raw/install-wizard: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular
npm --prefix web run build:install-wiz
touch web/dist/raw/install-wizard

View File

@@ -9,11 +9,13 @@
"version": "0.0.0",
"dependencies": {
"@iarna/toml": "^2.2.5",
"@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0",
"@start9labs/start-sdk": "file:../sdk/dist",
"esbuild-plugin-resolve": "^2.0.0",
"filebrowser": "^1.0.0",
"isomorphic-fetch": "^3.0.0",
"lodash": "^4.17.21",
"lodash.merge": "^4.6.2",
"node-fetch": "^3.1.0",
"ts-matches": "^5.5.1",
"tslib": "^2.5.3",
@@ -30,24 +32,27 @@
},
"../sdk/dist": {
"name": "@start9labs/start-sdk",
"version": "0.3.6-alpha1",
"version": "0.3.6-alpha5",
"license": "MIT",
"dependencies": {
"@iarna/toml": "^2.2.5",
"@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0",
"isomorphic-fetch": "^3.0.0",
"lodash": "^4.17.21",
"ts-matches": "^5.4.1"
"lodash.merge": "^4.6.2",
"mime": "^4.0.3",
"ts-matches": "^5.5.1",
"yaml": "^2.2.2"
},
"devDependencies": {
"@iarna/toml": "^2.2.5",
"@types/jest": "^29.4.0",
"@types/lodash": "^4.17.5",
"@types/lodash.merge": "^4.6.2",
"jest": "^29.4.3",
"prettier": "^3.2.5",
"ts-jest": "^29.0.5",
"ts-node": "^10.9.1",
"tsx": "^4.7.1",
"typescript": "^5.0.4",
"yaml": "^2.2.2"
"typescript": "^5.0.4"
}
},
"node_modules/@iarna/toml": {
@@ -72,6 +77,28 @@
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
}
},
"node_modules/@noble/curves": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.4.0.tgz",
"integrity": "sha512-p+4cb332SFCrReJkCYe8Xzm0OWi4Jji5jVdIZRL/PmacmDkFNw6MrrV+gGpiPxLHbV+zKFRywUWbaseT+tZRXg==",
"dependencies": {
"@noble/hashes": "1.4.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@noble/hashes": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz",
"integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
"dev": true,
@@ -1316,10 +1343,10 @@
"json-buffer": "3.0.1"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
"node_modules/lodash.merge": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="
},
"node_modules/lowercase-keys": {
"version": "2.0.0",
@@ -2233,6 +2260,19 @@
"os-filter-obj": "^2.0.0"
}
},
"@noble/curves": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.4.0.tgz",
"integrity": "sha512-p+4cb332SFCrReJkCYe8Xzm0OWi4Jji5jVdIZRL/PmacmDkFNw6MrrV+gGpiPxLHbV+zKFRywUWbaseT+tZRXg==",
"requires": {
"@noble/hashes": "1.4.0"
}
},
"@noble/hashes": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz",
"integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg=="
},
"@nodelib/fs.scandir": {
"version": "2.1.5",
"dev": true,
@@ -2261,14 +2301,17 @@
"version": "file:../sdk/dist",
"requires": {
"@iarna/toml": "^2.2.5",
"@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0",
"@types/jest": "^29.4.0",
"@types/lodash": "^4.17.5",
"@types/lodash.merge": "^4.6.2",
"isomorphic-fetch": "^3.0.0",
"jest": "^29.4.3",
"lodash": "^4.17.21",
"lodash.merge": "^4.6.2",
"mime": "^4.0.3",
"prettier": "^3.2.5",
"ts-jest": "^29.0.5",
"ts-matches": "^5.4.1",
"ts-matches": "^5.5.1",
"ts-node": "^10.9.1",
"tsx": "^4.7.1",
"typescript": "^5.0.4",
@@ -2988,10 +3031,10 @@
"json-buffer": "3.0.1"
}
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
"lodash.merge": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="
},
"lowercase-keys": {
"version": "2.0.0",

View File

@@ -18,10 +18,12 @@
"dependencies": {
"@iarna/toml": "^2.2.5",
"@start9labs/start-sdk": "file:../sdk/dist",
"@noble/hashes": "^1.4.0",
"@noble/curves": "^1.4.0",
"esbuild-plugin-resolve": "^2.0.0",
"filebrowser": "^1.0.0",
"isomorphic-fetch": "^3.0.0",
"lodash": "^4.17.21",
"lodash.merge": "^4.6.2",
"node-fetch": "^3.1.0",
"ts-matches": "^5.5.1",
"tslib": "^2.5.3",

223
core/Cargo.lock generated
View File

@@ -166,6 +166,12 @@ version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545"
[[package]]
name = "arrayvec"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
[[package]]
name = "arrayvec"
version = "0.7.4"
@@ -508,9 +514,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.5.0"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
dependencies = [
"serde",
]
@@ -521,16 +527,28 @@ version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d084b0137aaa901caf9f1e8b21daa6aa24d41cd806e111335541eff9683bd6"
[[package]]
name = "bitvec"
version = "0.19.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33"
dependencies = [
"funty 1.1.0",
"radium 0.5.3",
"tap",
"wyz 0.2.0",
]
[[package]]
name = "bitvec"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
dependencies = [
"funty",
"radium",
"funty 2.0.0",
"radium 0.7.0",
"tap",
"wyz",
"wyz 0.5.1",
]
[[package]]
@@ -540,7 +558,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780"
dependencies = [
"arrayref",
"arrayvec",
"arrayvec 0.7.4",
"constant_time_eq",
]
@@ -551,7 +569,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30cca6d3674597c30ddf2c587bf8d9d65c9a84d2326d941cc79c9842dfe0ef52"
dependencies = [
"arrayref",
"arrayvec",
"arrayvec 0.7.4",
"cc",
"cfg-if",
"constant_time_eq",
@@ -624,9 +642,9 @@ checksum = "981520c98f422fcc584dc1a95c334e6953900b9106bc47a9839b81790009eb21"
[[package]]
name = "cc"
version = "1.0.100"
version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c891175c3fb232128f48de6590095e59198bbeb8620c310be349bfc3afd12c7b"
checksum = "ac367972e516d45567c7eafc73d24e1c193dcf200a8d94e9db7b3d38b349572d"
dependencies = [
"jobserver",
"libc",
@@ -1051,7 +1069,7 @@ version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df"
dependencies = [
"bitflags 2.5.0",
"bitflags 2.6.0",
"crossterm_winapi",
"futures-core",
"libc",
@@ -1216,7 +1234,7 @@ version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "709ade444d53896e60f6265660eb50480dd08b77bfc822e5dcc233b88b0b2fba"
dependencies = [
"bitvec",
"bitvec 1.0.1",
"deku_derive",
"no_std_io",
"rustversion",
@@ -1416,9 +1434,9 @@ dependencies = [
[[package]]
name = "either"
version = "1.12.0"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b"
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
dependencies = [
"serde",
]
@@ -1445,12 +1463,13 @@ dependencies = [
[[package]]
name = "emver"
version = "0.1.7"
source = "git+https://github.com/Start9Labs/emver-rs.git#61cf0bc96711b4d6f3f30df8efef025e0cc02bad"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed260c4d7efaec031b9c4f6c4d3cf136e3df2bbfe50925800236f5e847f28704"
dependencies = [
"either",
"fp-core",
"nom",
"nom 6.1.2",
"serde",
]
@@ -1529,6 +1548,24 @@ version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
[[package]]
name = "exver"
version = "0.2.0"
source = "git+https://github.com/Start9Labs/exver-rs.git#29f52c1be18a0fe187670beac92822994b0d1949"
dependencies = [
"either",
"emver",
"fp-core",
"getrandom 0.2.15",
"itertools 0.13.0",
"memchr",
"pest",
"pest_derive",
"serde",
"smallvec",
"yasi",
]
[[package]]
name = "eyre"
version = "0.6.12"
@@ -1648,6 +1685,12 @@ dependencies = [
"itertools 0.8.2",
]
[[package]]
name = "funty"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7"
[[package]]
name = "funty"
version = "2.0.0"
@@ -1799,7 +1842,7 @@ version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8283e7331b8c93b9756e0cfdbcfb90312852f953c6faf9bf741e684cc3b6ad69"
dependencies = [
"bitflags 2.5.0",
"bitflags 2.6.0",
"crc",
"log",
"uuid",
@@ -1913,7 +1956,7 @@ dependencies = [
"base64 0.21.7",
"byteorder",
"flate2",
"nom",
"nom 7.1.3",
"num-traits",
]
@@ -2461,6 +2504,15 @@ dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.11"
@@ -2637,6 +2689,19 @@ dependencies = [
"spin",
]
[[package]]
name = "lexical-core"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe"
dependencies = [
"arrayvec 0.5.2",
"bitflags 1.3.2",
"cfg-if",
"ryu",
"static_assertions",
]
[[package]]
name = "libc"
version = "0.2.155"
@@ -2655,7 +2720,7 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
"bitflags 2.5.0",
"bitflags 2.6.0",
"libc",
]
@@ -2731,7 +2796,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c487024623ae38584610237dd1be8932bb2b324474b23c37a25f9fbe6bf5e9e"
dependencies = [
"bincode",
"bitvec",
"bitvec 1.0.1",
"serde",
"serde-big-array",
"thiserror",
@@ -2821,7 +2886,7 @@ dependencies = [
"base64 0.21.7",
"color-eyre",
"ed25519-dalek 2.1.1",
"emver",
"exver",
"ipnet",
"lazy_static",
"mbrman",
@@ -2908,7 +2973,7 @@ version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053"
dependencies = [
"bitflags 2.5.0",
"bitflags 2.6.0",
"cfg-if",
"libc",
]
@@ -2922,6 +2987,19 @@ dependencies = [
"memchr",
]
[[package]]
name = "nom"
version = "6.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2"
dependencies = [
"bitvec 0.19.6",
"funty 1.1.0",
"lexical-core",
"memchr",
"version_check",
]
[[package]]
name = "nom"
version = "7.1.3"
@@ -2958,9 +3036,9 @@ dependencies = [
[[package]]
name = "num-bigint"
version = "0.4.5"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = [
"num-integer",
"num-traits",
@@ -3116,7 +3194,7 @@ version = "0.10.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f"
dependencies = [
"bitflags 2.5.0",
"bitflags 2.6.0",
"cfg-if",
"foreign-types",
"libc",
@@ -3309,6 +3387,51 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
version = "2.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "560131c633294438da9f7c4b08189194b20946c8274c6b9e38881a7874dc8ee8"
dependencies = [
"memchr",
"thiserror",
"ucd-trie",
]
[[package]]
name = "pest_derive"
version = "2.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26293c9193fbca7b1a3bf9b79dc1e388e927e6cacaa78b4a3ab705a1d3d41459"
dependencies = [
"pest",
"pest_generator",
]
[[package]]
name = "pest_generator"
version = "2.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ec22af7d3fb470a85dd2ca96b7c577a1eb4ef6f1683a9fe9a8c16e136c04687"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.68",
]
[[package]]
name = "pest_meta"
version = "2.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7a240022f37c361ec1878d646fc5b7d7c4d28d5946e1a80ad5a7a4f4ca0bdcd"
dependencies = [
"once_cell",
"pest",
"sha2 0.10.8",
]
[[package]]
name = "petgraph"
version = "0.6.5"
@@ -3466,7 +3589,7 @@ checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d"
dependencies = [
"bit-set",
"bit-vec",
"bitflags 2.5.0",
"bitflags 2.6.0",
"lazy_static",
"num-traits",
"rand 0.8.5",
@@ -3552,6 +3675,12 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "radium"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8"
[[package]]
name = "radium"
version = "0.7.0"
@@ -3697,7 +3826,7 @@ version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd"
dependencies = [
"bitflags 2.5.0",
"bitflags 2.6.0",
]
[[package]]
@@ -3948,7 +4077,7 @@ version = "0.38.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f"
dependencies = [
"bitflags 2.5.0",
"bitflags 2.6.0",
"errno",
"libc",
"linux-raw-sys",
@@ -4133,7 +4262,7 @@ version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0"
dependencies = [
"bitflags 2.5.0",
"bitflags 2.6.0",
"core-foundation",
"core-foundation-sys",
"libc",
@@ -4198,9 +4327,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.117"
version = "1.0.118"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3"
checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4"
dependencies = [
"indexmap 2.2.6",
"itoa",
@@ -4459,7 +4588,7 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f895e3734318cc55f1fe66258926c9b910c124d47520339efecbb6c59cec7c1f"
dependencies = [
"nom",
"nom 7.1.3",
"unicode_categories",
]
@@ -4566,7 +4695,7 @@ checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418"
dependencies = [
"atoi",
"base64 0.21.7",
"bitflags 2.5.0",
"bitflags 2.6.0",
"byteorder",
"bytes",
"chrono",
@@ -4609,7 +4738,7 @@ checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e"
dependencies = [
"atoi",
"base64 0.21.7",
"bitflags 2.5.0",
"bitflags 2.6.0",
"byteorder",
"chrono",
"crc",
@@ -4765,7 +4894,7 @@ dependencies = [
"ed25519 2.2.3",
"ed25519-dalek 1.0.1",
"ed25519-dalek 2.1.1",
"emver",
"exver",
"fd-lock-rs",
"futures",
"gpt",
@@ -4799,7 +4928,7 @@ dependencies = [
"models",
"new_mime_guess",
"nix 0.27.1",
"nom",
"nom 7.1.3",
"num",
"num_enum",
"once_cell",
@@ -4861,6 +4990,12 @@ dependencies = [
"zeroize",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "stderrlog"
version = "0.5.4"
@@ -5113,9 +5248,9 @@ dependencies = [
[[package]]
name = "tinyvec"
version = "1.6.0"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
checksum = "c55115c6fbe2d2bef26eb09ad74bde02d8255476fc0c7b515ef09fbb35742d82"
dependencies = [
"tinyvec_macros",
]
@@ -5634,6 +5769,12 @@ version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ucd-trie"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"
[[package]]
name = "unarray"
version = "0.1.4"
@@ -6121,6 +6262,12 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "wyz"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214"
[[package]]
name = "wyz"
version = "0.5.1"

View File

@@ -12,7 +12,7 @@ color-eyre = "0.6.2"
ed25519-dalek = { version = "2.0.0", features = ["serde"] }
lazy_static = "1.4"
mbrman = "0.5.2"
emver = { version = "0.1", git = "https://github.com/Start9Labs/emver-rs.git", features = [
exver = { version = "0.2.0", git = "https://github.com/Start9Labs/exver-rs.git", features = [
"serde",
] }
ipnet = "2.8.0"

View File

@@ -242,8 +242,8 @@ impl From<std::string::FromUtf8Error> for Error {
Error::new(e, ErrorKind::Utf8)
}
}
impl From<emver::ParseError> for Error {
fn from(e: emver::ParseError) -> Self {
impl From<exver::ParseError> for Error {
fn from(e: exver::ParseError) -> Self {
Error::new(e, ErrorKind::ParseVersion)
}
}

View File

@@ -8,14 +8,14 @@ use ts_rs::TS;
#[derive(Debug, Clone, TS)]
#[ts(type = "string", rename = "Version")]
pub struct VersionString {
version: emver::Version,
version: exver::ExtendedVersion,
string: String,
}
impl VersionString {
pub fn as_str(&self) -> &str {
self.string.as_str()
}
pub fn into_version(self) -> emver::Version {
pub fn into_version(self) -> exver::ExtendedVersion {
self.version
}
}
@@ -25,7 +25,7 @@ impl std::fmt::Display for VersionString {
}
}
impl std::str::FromStr for VersionString {
type Err = <emver::Version as FromStr>::Err;
type Err = <exver::ExtendedVersion as FromStr>::Err;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(VersionString {
string: s.to_owned(),
@@ -33,32 +33,32 @@ impl std::str::FromStr for VersionString {
})
}
}
impl From<emver::Version> for VersionString {
fn from(v: emver::Version) -> Self {
impl From<exver::ExtendedVersion> for VersionString {
fn from(v: exver::ExtendedVersion) -> Self {
VersionString {
string: v.to_string(),
version: v,
}
}
}
impl From<VersionString> for emver::Version {
impl From<VersionString> for exver::ExtendedVersion {
fn from(v: VersionString) -> Self {
v.version
}
}
impl Default for VersionString {
fn default() -> Self {
Self::from(emver::Version::default())
Self::from(exver::ExtendedVersion::default())
}
}
impl Deref for VersionString {
type Target = emver::Version;
type Target = exver::ExtendedVersion;
fn deref(&self) -> &Self::Target {
&self.version
}
}
impl AsRef<emver::Version> for VersionString {
fn as_ref(&self) -> &emver::Version {
impl AsRef<exver::ExtendedVersion> for VersionString {
fn as_ref(&self) -> &exver::ExtendedVersion {
&self.version
}
}
@@ -80,7 +80,13 @@ impl PartialOrd for VersionString {
}
impl Ord for VersionString {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.version.cmp(&other.version)
self.version.partial_cmp(&other.version).unwrap_or_else(|| {
match (self.version.flavor(), other.version.flavor()) {
(None, Some(_)) => std::cmp::Ordering::Greater,
(Some(_), None) => std::cmp::Ordering::Less,
(a, b) => a.cmp(&b),
}
})
}
}
impl Hash for VersionString {
@@ -94,7 +100,8 @@ impl<'de> Deserialize<'de> for VersionString {
D: Deserializer<'de>,
{
let string = String::deserialize(deserializer)?;
let version = emver::Version::from_str(&string).map_err(::serde::de::Error::custom)?;
let version =
exver::ExtendedVersion::from_str(&string).map_err(::serde::de::Error::custom)?;
Ok(Self { string, version })
}
}

View File

@@ -86,7 +86,7 @@ ed25519-dalek = { version = "2.1.1", features = [
"pkcs8",
] }
ed25519-dalek-v1 = { package = "ed25519-dalek", version = "1" }
emver = { version = "0.1.7", git = "https://github.com/Start9Labs/emver-rs.git", features = [
exver = { version = "0.2.0", git = "https://github.com/Start9Labs/exver-rs.git", features = [
"serde",
] }
fd-lock-rs = "0.1.4"

View File

@@ -28,6 +28,7 @@ pub struct AccountInfo {
pub root_ca_key: PKey<Private>,
pub root_ca_cert: X509,
pub ssh_key: ssh_key::PrivateKey,
pub compat_s9pk_key: ed25519_dalek::SigningKey,
}
impl AccountInfo {
pub fn new(password: &str, start_time: SystemTime) -> Result<Self, Error> {
@@ -39,6 +40,7 @@ impl AccountInfo {
let ssh_key = ssh_key::PrivateKey::from(ssh_key::private::Ed25519Keypair::random(
&mut rand::thread_rng(),
));
let compat_s9pk_key = ed25519_dalek::SigningKey::generate(&mut rand::thread_rng());
Ok(Self {
server_id,
hostname,
@@ -47,6 +49,7 @@ impl AccountInfo {
root_ca_key,
root_ca_cert,
ssh_key,
compat_s9pk_key,
})
}
@@ -61,6 +64,7 @@ impl AccountInfo {
let root_ca_key = cert_store.as_root_key().de()?.0;
let root_ca_cert = cert_store.as_root_cert().de()?.0;
let ssh_key = db.as_private().as_ssh_privkey().de()?.0;
let compat_s9pk_key = db.as_private().as_compat_s9pk_key().de()?.0;
Ok(Self {
server_id,
@@ -70,6 +74,7 @@ impl AccountInfo {
root_ca_key,
root_ca_cert,
ssh_key,
compat_s9pk_key,
})
}
@@ -92,6 +97,9 @@ impl AccountInfo {
db.as_private_mut()
.as_ssh_privkey_mut()
.ser(Pem::new_ref(&self.ssh_key))?;
db.as_private_mut()
.as_compat_s9pk_key_mut()
.ser(Pem::new_ref(&self.compat_s9pk_key))?;
let key_store = db.as_private_mut().as_key_store_mut();
key_store.as_onion_mut().insert_key(&self.tor_key)?;
let cert_store = key_store.as_local_certs_mut();

View File

@@ -85,6 +85,7 @@ impl OsBackupV0 {
ssh_key::Algorithm::Ed25519,
)?,
tor_key: TorSecretKeyV3::from(self.tor_key.0),
compat_s9pk_key: ed25519_dalek::SigningKey::generate(&mut rand::thread_rng()),
},
ui: self.ui,
})
@@ -113,6 +114,7 @@ impl OsBackupV1 {
root_ca_cert: self.root_ca_cert.0,
ssh_key: ssh_key::PrivateKey::from(Ed25519Keypair::from_seed(&self.net_key.0)),
tor_key: TorSecretKeyV3::from(ed25519_expand_key(&self.net_key.0)),
compat_s9pk_key: ed25519_dalek::SigningKey::from_bytes(&self.net_key),
},
ui: self.ui,
}
@@ -124,13 +126,14 @@ impl OsBackupV1 {
#[serde(rename = "kebab-case")]
struct OsBackupV2 {
server_id: String, // uuidv4
hostname: String, // <adjective>-<noun>
root_ca_key: Pem<PKey<Private>>, // PEM Encoded OpenSSL Key
root_ca_cert: Pem<X509>, // PEM Encoded OpenSSL X509 Certificate
ssh_key: Pem<ssh_key::PrivateKey>, // PEM Encoded OpenSSH Key
tor_key: TorSecretKeyV3, // Base64 Encoded Ed25519 Expanded Secret Key
ui: Value, // JSON Value
server_id: String, // uuidv4
hostname: String, // <adjective>-<noun>
root_ca_key: Pem<PKey<Private>>, // PEM Encoded OpenSSL Key
root_ca_cert: Pem<X509>, // PEM Encoded OpenSSL X509 Certificate
ssh_key: Pem<ssh_key::PrivateKey>, // PEM Encoded OpenSSH Key
tor_key: TorSecretKeyV3, // Base64 Encoded Ed25519 Expanded Secret Key
compat_s9pk_key: Pem<ed25519_dalek::SigningKey>, // PEM Encoded ED25519 Key
ui: Value, // JSON Value
}
impl OsBackupV2 {
fn project(self) -> OsBackup {
@@ -143,6 +146,7 @@ impl OsBackupV2 {
root_ca_cert: self.root_ca_cert.0,
ssh_key: self.ssh_key.0,
tor_key: self.tor_key,
compat_s9pk_key: self.compat_s9pk_key.0,
},
ui: self.ui,
}
@@ -155,6 +159,7 @@ impl OsBackupV2 {
root_ca_cert: Pem(backup.account.root_ca_cert.clone()),
ssh_key: Pem(backup.account.ssh_key.clone()),
tor_key: backup.account.tor_key.clone(),
compat_s9pk_key: Pem(backup.account.compat_s9pk_key.clone()),
ui: backup.ui.clone(),
}
}

View File

@@ -156,16 +156,14 @@ async fn restore_packages(
let mut tasks = BTreeMap::new();
for id in ids {
let backup_dir = backup_guard.clone().package_backup(&id);
let s9pk_path = backup_dir.path().join(&id).with_extension("s9pk");
let task = ctx
.services
.install(
ctx.clone(),
S9pk::open(
backup_dir.path().join(&id).with_extension("s9pk"),
Some(&id),
)
.await?,
|| S9pk::open(s9pk_path, Some(&id)),
Some(backup_dir),
None,
)
.await?;
tasks.insert(id, task);

View File

@@ -7,6 +7,7 @@ use clap::Parser;
use color_eyre::eyre::eyre;
use digest::generic_array::GenericArray;
use digest::OutputSizeUser;
use exver::Version;
use models::PackageId;
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
@@ -194,7 +195,7 @@ pub async fn list(ctx: RpcContext) -> Result<BTreeMap<BackupTargetId, BackupTarg
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BackupInfo {
pub version: VersionString,
pub version: Version,
pub timestamp: Option<DateTime<Utc>>,
pub package_backups: BTreeMap<PackageId, PackageBackupInfo>,
}
@@ -204,7 +205,7 @@ pub struct BackupInfo {
pub struct PackageBackupInfo {
pub title: String,
pub version: VersionString,
pub os_version: VersionString,
pub os_version: Version,
pub timestamp: DateTime<Utc>,
}
@@ -223,9 +224,9 @@ fn display_backup_info(params: WithIoFormat<InfoParams>, info: BackupInfo) {
"TIMESTAMP",
]);
table.add_row(row![
"EMBASSY OS",
info.version.as_str(),
info.version.as_str(),
"StartOS",
&info.version.to_string(),
&info.version.to_string(),
&if let Some(ts) = &info.timestamp {
ts.to_string()
} else {
@@ -236,7 +237,7 @@ fn display_backup_info(params: WithIoFormat<InfoParams>, info: BackupInfo) {
let row = row![
&*id,
info.version.as_str(),
info.os_version.as_str(),
&info.os_version.to_string(),
&info.timestamp.to_string(),
];
table.add_row(row);

View File

@@ -43,7 +43,9 @@ impl Drop for CliContextSeed {
std::fs::create_dir_all(&parent_dir).unwrap();
}
let mut writer = fd_lock_rs::FdLock::lock(
File::create(&tmp).unwrap(),
File::create(&tmp)
.with_ctx(|_| (ErrorKind::Filesystem, &tmp))
.unwrap(),
fd_lock_rs::LockType::Exclusive,
true,
)
@@ -80,9 +82,12 @@ impl CliContext {
});
let cookie_store = Arc::new(CookieStoreMutex::new({
let mut store = if cookie_path.exists() {
CookieStore::load_json(BufReader::new(File::open(&cookie_path)?))
.map_err(|e| eyre!("{}", e))
.with_kind(crate::ErrorKind::Deserialization)?
CookieStore::load_json(BufReader::new(
File::open(&cookie_path)
.with_ctx(|_| (ErrorKind::Filesystem, cookie_path.display()))?,
))
.map_err(|e| eyre!("{}", e))
.with_kind(crate::ErrorKind::Deserialization)?
} else {
CookieStore::default()
};

View File

@@ -37,7 +37,10 @@ pub trait ContextConfig: DeserializeOwned + Default {
.map(|f| f.parse())
.transpose()?
.unwrap_or_default();
format.from_reader(File::open(path)?)
format.from_reader(
File::open(path.as_ref())
.with_ctx(|_| (ErrorKind::Filesystem, path.as_ref().display()))?,
)
}
fn load_path_rec(&mut self, path: Option<impl AsRef<Path>>) -> Result<(), Error> {
if let Some(path) = path.filter(|p| p.as_ref().exists()) {

View File

@@ -14,7 +14,6 @@ use rpc_toolkit::{CallRemote, Context, Empty};
use tokio::sync::{broadcast, Mutex, RwLock};
use tokio::time::Instant;
use tracing::instrument;
use url::Url;
use super::setup::CURRENT_SECRET;
use crate::account::AccountInfo;

View File

@@ -40,6 +40,7 @@ impl Database {
notifications: Notifications::new(),
cifs: CifsTargets::new(),
package_stores: BTreeMap::new(),
compat_s9pk_key: Pem(account.compat_s9pk_key.clone()),
}, // TODO
})
}

View File

@@ -1,7 +1,7 @@
use std::collections::{BTreeMap, BTreeSet};
use chrono::{DateTime, Utc};
use emver::VersionRange;
use exver::VersionRange;
use imbl_value::InternedString;
use models::{ActionId, DataUrl, HealthCheckId, HostId, PackageId, ServiceInterfaceId};
use patch_db::json_ptr::JsonPointer;

View File

@@ -19,6 +19,8 @@ use crate::util::serde::Pem;
pub struct Private {
pub key_store: KeyStore,
pub password: String, // argon2 hash
#[serde(default = "generate_compat_key")]
pub compat_s9pk_key: Pem<ed25519_dalek::SigningKey>,
pub ssh_privkey: Pem<ssh_key::PrivateKey>,
pub ssh_pubkeys: SshKeys,
pub available_ports: AvailablePorts,
@@ -28,3 +30,7 @@ pub struct Private {
#[serde(default)]
pub package_stores: BTreeMap<PackageId, Value>,
}
fn generate_compat_key() -> Pem<ed25519_dalek::SigningKey> {
Pem(ed25519_dalek::SigningKey::generate(&mut rand::thread_rng()))
}

View File

@@ -2,7 +2,7 @@ use std::collections::{BTreeMap, BTreeSet};
use std::net::{Ipv4Addr, Ipv6Addr};
use chrono::{DateTime, Utc};
use emver::VersionRange;
use exver::{Version, VersionRange};
use imbl_value::InternedString;
use ipnet::{Ipv4Net, Ipv6Net};
use isocountry::CountryCode;
@@ -21,7 +21,6 @@ use crate::net::utils::{get_iface_ipv4_addr, get_iface_ipv6_addr};
use crate::prelude::*;
use crate::progress::FullProgress;
use crate::util::cpupower::Governor;
use crate::util::VersionString;
use crate::version::{Current, VersionT};
use crate::{ARCH, PLATFORM};
@@ -43,7 +42,7 @@ impl Public {
arch: get_arch(),
platform: get_platform(),
id: account.server_id.clone(),
version: Current::new().semver().into(),
version: Current::new().semver(),
hostname: account.hostname.no_dot_host_name(),
last_backup: None,
eos_version_compat: Current::new().compat().clone(),
@@ -109,7 +108,8 @@ pub struct ServerInfo {
pub platform: InternedString,
pub id: String,
pub hostname: String,
pub version: VersionString,
#[ts(type = "string")]
pub version: Version,
#[ts(type = "string | null")]
pub last_backup: Option<DateTime<Utc>>,
#[ts(type = "string")]

View File

@@ -8,8 +8,8 @@ use ed25519_dalek::{SigningKey, VerifyingKey};
use tracing::instrument;
use crate::context::CliContext;
use crate::prelude::*;
use crate::util::serde::Pem;
use crate::{Error, ResultExt};
#[instrument(skip_all)]
pub fn init(ctx: CliContext) -> Result<(), Error> {
@@ -26,7 +26,8 @@ pub fn init(ctx: CliContext) -> Result<(), Error> {
secret_key: secret.to_bytes(),
public_key: Some(PublicKeyBytes(VerifyingKey::from(&secret).to_bytes())),
};
let mut dev_key_file = File::create(&ctx.developer_key_path)?;
let mut dev_key_file = File::create(&ctx.developer_key_path)
.with_ctx(|_| (ErrorKind::Filesystem, ctx.developer_key_path.display()))?;
dev_key_file.write_all(
keypair_bytes
.to_pkcs8_pem(base64ct::LineEnding::default())

View File

@@ -102,10 +102,10 @@ fn display_disk_info(params: WithIoFormat<Empty>, args: Vec<DiskInfo>) {
} else {
"N/A"
},
if let Some(eos) = part.start_os.as_ref() {
eos.version.as_str()
&if let Some(eos) = part.start_os.as_ref() {
eos.version.to_string()
} else {
"N/A"
"N/A".to_owned()
},
];
table.add_row(row);

View File

@@ -20,7 +20,7 @@ use super::mount::guard::TmpMountGuard;
use crate::disk::mount::guard::GenericMountGuard;
use crate::disk::OsPartitionInfo;
use crate::util::serde::IoFormat;
use crate::util::{Invoke, VersionString};
use crate::util::Invoke;
use crate::{Error, ResultExt as _};
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
@@ -56,7 +56,7 @@ pub struct PartitionInfo {
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct EmbassyOsRecoveryInfo {
pub version: VersionString,
pub version: exver::Version,
pub full: bool,
pub password_hash: Option<String>,
pub wrapped_key: Option<String>,

View File

@@ -3,13 +3,12 @@ use std::path::Path;
use async_compression::tokio::bufread::GzipDecoder;
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::BufReader;
use tokio::process::Command;
use crate::disk::fsck::RequiresReboot;
use crate::prelude::*;
use crate::progress::PhaseProgressTrackerHandle;
use crate::util::io::open_file;
use crate::util::Invoke;
use crate::PLATFORM;
@@ -134,7 +133,7 @@ pub async fn update_firmware(firmware: Firmware) -> Result<(), Error> {
.invoke(ErrorKind::Filesystem)
.await?;
let mut rdr = if tokio::fs::metadata(&firmware_path).await.is_ok() {
GzipDecoder::new(BufReader::new(File::open(&firmware_path).await?))
GzipDecoder::new(BufReader::new(open_file(&firmware_path).await?))
} else {
return Err(Error::new(
eyre!("Firmware {id}.rom.gz not found in {firmware_dir:?}"),

View File

@@ -5,7 +5,7 @@ use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use std::time::{Duration, SystemTime};
use axum::extract::ws::{self, CloseFrame};
use axum::extract::ws::{self};
use color_eyre::eyre::eyre;
use futures::{StreamExt, TryStreamExt};
use itertools::Itertools;
@@ -31,7 +31,7 @@ use crate::progress::{
};
use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::ssh::SSH_AUTHORIZED_KEYS_FILE;
use crate::util::io::IOHook;
use crate::util::io::{create_file, IOHook};
use crate::util::net::WebSocketExt;
use crate::util::{cpupower, Invoke};
use crate::Error;
@@ -138,10 +138,7 @@ pub async fn init_postgres(datadir: impl AsRef<Path>) -> Result<(), Error> {
old_version -= 1;
let old_datadir = db_dir.join(old_version.to_string());
if tokio::fs::metadata(&old_datadir).await.is_ok() {
tokio::fs::File::create(&incomplete_path)
.await?
.sync_all()
.await?;
create_file(&incomplete_path).await?.sync_all().await?;
Command::new("pg_upgradecluster")
.arg(old_version.to_string())
.arg("main")

View File

@@ -1,21 +1,21 @@
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use clap::builder::ValueParserFactory;
use clap::{value_parser, CommandFactory, FromArgMatches, Parser};
use color_eyre::eyre::eyre;
use emver::VersionRange;
use futures::StreamExt;
use imbl_value::InternedString;
use exver::VersionRange;
use futures::{AsyncWriteExt, StreamExt};
use imbl_value::{json, InternedString};
use itertools::Itertools;
use patch_db::json_ptr::JsonPointer;
use models::VersionString;
use reqwest::header::{HeaderMap, CONTENT_LENGTH};
use reqwest::Url;
use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::HandlerArgs;
use rustyline_async::ReadlineEvent;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use tokio::sync::oneshot;
use tracing::instrument;
use ts_rs::TS;
@@ -23,13 +23,14 @@ use ts_rs::TS;
use crate::context::{CliContext, RpcContext};
use crate::db::model::package::{ManifestPreference, PackageState, PackageStateMatchModelRef};
use crate::prelude::*;
use crate::progress::{FullProgress, PhasedProgressBar};
use crate::progress::{FullProgress, FullProgressTracker, PhasedProgressBar};
use crate::registry::context::{RegistryContext, RegistryUrlParams};
use crate::registry::package::get::GetPackageResponse;
use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::s9pk::manifest::PackageId;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::S9pk;
use crate::upload::upload;
use crate::util::clap::FromStrParser;
use crate::util::io::open_file;
use crate::util::net::WebSocketExt;
use crate::util::Never;
@@ -38,32 +39,33 @@ pub const PKG_PUBLIC_DIR: &str = "package-data/public";
pub const PKG_WASM_DIR: &str = "package-data/wasm";
// #[command(display(display_serializable))]
pub async fn list(ctx: RpcContext) -> Result<Value, Error> {
Ok(ctx.db.peek().await.as_public().as_package_data().as_entries()?
pub async fn list(ctx: RpcContext) -> Result<Vec<Value>, Error> {
Ok(ctx
.db
.peek()
.await
.as_public()
.as_package_data()
.as_entries()?
.iter()
.filter_map(|(id, pde)| {
let status = match pde.as_state_info().as_match() {
PackageStateMatchModelRef::Installed(_) => {
"installed"
}
PackageStateMatchModelRef::Installing(_) => {
"installing"
}
PackageStateMatchModelRef::Updating(_) => {
"updating"
}
PackageStateMatchModelRef::Restoring(_) => {
"restoring"
}
PackageStateMatchModelRef::Removing(_) => {
"removing"
}
PackageStateMatchModelRef::Error(_) => {
"error"
}
PackageStateMatchModelRef::Installed(_) => "installed",
PackageStateMatchModelRef::Installing(_) => "installing",
PackageStateMatchModelRef::Updating(_) => "updating",
PackageStateMatchModelRef::Restoring(_) => "restoring",
PackageStateMatchModelRef::Removing(_) => "removing",
PackageStateMatchModelRef::Error(_) => "error",
};
serde_json::to_value(json!({ "status": status, "id": id.clone(), "version": pde.as_state_info().as_manifest(ManifestPreference::Old).as_version().de().ok()?}))
.ok()
Some(json!({
"status": status,
"id": id.clone(),
"version": pde.as_state_info()
.as_manifest(ManifestPreference::Old)
.as_version()
.de()
.ok()?
}))
})
.collect())
}
@@ -107,65 +109,57 @@ impl std::fmt::Display for MinMax {
}
}
#[derive(Deserialize, Serialize, Parser, TS)]
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")]
pub struct InstallParams {
#[ts(type = "string")]
registry: Url,
id: PackageId,
#[arg(short = 'm', long = "marketplace-url")]
#[ts(type = "string | null")]
registry: Option<Url>,
#[arg(short = 'v', long = "version-spec")]
version_spec: Option<String>,
#[arg(long = "version-priority")]
version_priority: Option<MinMax>,
version: VersionString,
}
// #[command(
// custom_cli(cli_install(async, context(CliContext))),
// )]
#[instrument(skip_all)]
pub async fn install(
ctx: RpcContext,
InstallParams {
id,
registry,
version_spec,
version_priority,
id,
version,
}: InstallParams,
) -> Result<(), Error> {
let version_str = match &version_spec {
None => "*",
Some(v) => &*v,
};
let version: VersionRange = version_str.parse()?;
let registry = registry.unwrap_or_else(|| crate::DEFAULT_MARKETPLACE.parse().unwrap());
let version_priority = version_priority.unwrap_or_default();
let s9pk = S9pk::deserialize(
&Arc::new(
HttpSource::new(
ctx.client.clone(),
format!(
"{}/package/v0/{}.s9pk?spec={}&version-priority={}",
registry, id, version, version_priority,
)
.parse()?,
)
.await?,
),
None, // TODO
)
.await?;
let package: GetPackageResponse = from_value(
ctx.call_remote_with::<RegistryContext, _>(
"package.get",
json!({
"id": id,
"version": VersionRange::exactly(version.deref().clone()),
}),
RegistryUrlParams {
registry: registry.clone(),
},
)
.await?,
)?;
ensure_code!(
&s9pk.as_manifest().id == &id,
ErrorKind::ValidateS9pk,
"manifest.id does not match expected"
);
let asset = &package
.best
.get(&version)
.ok_or_else(|| {
Error::new(
eyre!("{id}@{version} not found on {registry}"),
ErrorKind::NotFound,
)
})?
.s9pk;
let download = ctx
.services
.install(ctx.clone(), s9pk, None::<Never>)
.install(
ctx.clone(),
|| asset.deserialize_s9pk(ctx.client.clone()),
None::<Never>,
None,
)
.await?;
tokio::spawn(async move { download.await?.await });
@@ -193,113 +187,74 @@ pub async fn sideload(
SideloadParams { session }: SideloadParams,
) -> Result<SideloadResponse, Error> {
let (upload, file) = upload(&ctx, session.clone()).await?;
let (id_send, id_recv) = oneshot::channel();
let (err_send, err_recv) = oneshot::channel();
let progress = Guid::new();
let db = ctx.db.clone();
let mut sub = db
.subscribe(
"/package-data/{id}/install-progress"
.parse::<JsonPointer>()
.with_kind(ErrorKind::Database)?,
)
.await;
ctx.rpc_continuations.add(
progress.clone(),
RpcContinuation::ws_authed(&ctx, session,
|mut ws| {
use axum::extract::ws::Message;
async move {
if let Err(e) = async {
let id = match id_recv.await.map_err(|_| {
Error::new(
eyre!("Could not get id to watch progress"),
ErrorKind::Cancelled,
)
}).and_then(|a|a) {
Ok(a) => a,
Err(e) =>{ ws.send(Message::Text(
serde_json::to_string(&Err::<(), _>(RpcError::from(e.clone_output())))
.with_kind(ErrorKind::Serialization)?,
))
.await
.with_kind(ErrorKind::Network)?;
return Err(e);
}
};
tokio::select! {
res = async {
while let Some(_) = sub.recv().await {
ws.send(Message::Text(
serde_json::to_string(&if let Some(p) = db
.peek()
.await
.as_public()
.as_package_data()
.as_idx(&id)
.and_then(|e| e.as_state_info().as_installing_info()).map(|i| i.as_progress())
{
Ok::<_, ()>(p.de()?)
} else {
let mut p = FullProgress::new();
p.overall.complete();
Ok(p)
})
.with_kind(ErrorKind::Serialization)?,
))
.await
.with_kind(ErrorKind::Network)?;
}
Ok::<_, Error>(())
} => res?,
err = err_recv => {
if let Ok(e) = err {
ws.send(Message::Text(
serde_json::to_string(&Err::<(), _>(e))
.with_kind(ErrorKind::Serialization)?,
))
.await
.with_kind(ErrorKind::Network)?;
let progress_tracker = FullProgressTracker::new();
let mut progress_listener = progress_tracker.stream(Some(Duration::from_millis(200)));
ctx.rpc_continuations
.add(
progress.clone(),
RpcContinuation::ws_authed(
&ctx,
session,
|mut ws| {
use axum::extract::ws::Message;
async move {
if let Err(e) = async {
tokio::select! {
res = async {
while let Some(progress) = progress_listener.next().await {
ws.send(Message::Text(
serde_json::to_string(&Ok::<_, ()>(progress))
.with_kind(ErrorKind::Serialization)?,
))
.await
.with_kind(ErrorKind::Network)?;
}
Ok::<_, Error>(())
} => res?,
err = err_recv => {
if let Ok(e) = err {
ws.send(Message::Text(
serde_json::to_string(&Err::<(), _>(e))
.with_kind(ErrorKind::Serialization)?,
))
.await
.with_kind(ErrorKind::Network)?;
}
}
}
ws.normal_close("complete").await?;
Ok::<_, Error>(())
}
.await
{
tracing::error!("Error tracking sideload progress: {e}");
tracing::debug!("{e:?}");
}
ws.normal_close("complete").await?;
Ok::<_, Error>(())
}
.await
{
tracing::error!("Error tracking sideload progress: {e}");
tracing::debug!("{e:?}");
}
}
},
Duration::from_secs(600),
),
)
.await;
},
Duration::from_secs(600),
),
)
.await;
tokio::spawn(async move {
if let Err(e) = async {
match S9pk::deserialize(
&file, None, // TODO
)
.await
{
Ok(s9pk) => {
let _ = id_send.send(Ok(s9pk.as_manifest().id.clone()));
ctx.services
.install(ctx.clone(), s9pk, None::<Never>)
.await?
.await?
.await?;
file.delete().await
}
Err(e) => {
let _ = id_send.send(Err(e.clone_output()));
return Err(e);
}
}
let key = ctx.db.peek().await.into_private().into_compat_s9pk_key();
ctx.services
.install(
ctx.clone(),
|| crate::s9pk::load(file.clone(), || Ok(key.de()?.0), Some(&progress_tracker)),
None::<Never>,
Some(progress_tracker.clone()),
)
.await?
.await?
.await?;
file.delete().await
}
.await
{
@@ -311,10 +266,16 @@ pub async fn sideload(
Ok(SideloadResponse { upload, progress })
}
#[derive(Deserialize, Serialize, Parser)]
pub struct QueryPackageParams {
id: PackageId,
version: Option<VersionRange>,
}
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum CliInstallParams {
Marketplace(InstallParams),
Marketplace(QueryPackageParams),
Sideload(PathBuf),
}
impl CommandFactory for CliInstallParams {
@@ -328,14 +289,19 @@ impl CommandFactory for CliInstallParams {
.required_unless_present("id")
.value_parser(value_parser!(PathBuf)),
)
.args(InstallParams::command().get_arguments().cloned().map(|a| {
if a.get_id() == "id" {
a.required(false).required_unless_present("sideload")
} else {
a
}
.conflicts_with("sideload")
}))
.args(
QueryPackageParams::command()
.get_arguments()
.cloned()
.map(|a| {
if a.get_id() == "id" {
a.required(false).required_unless_present("sideload")
} else {
a
}
.conflicts_with("sideload")
}),
)
}
fn command_for_update() -> clap::Command {
Self::command()
@@ -346,7 +312,9 @@ impl FromArgMatches for CliInstallParams {
if let Some(sideload) = matches.get_one::<PathBuf>("sideload") {
Ok(Self::Sideload(sideload.clone()))
} else {
Ok(Self::Marketplace(InstallParams::from_arg_matches(matches)?))
Ok(Self::Marketplace(QueryPackageParams::from_arg_matches(
matches,
)?))
}
}
fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> {
@@ -355,6 +323,35 @@ impl FromArgMatches for CliInstallParams {
}
}
#[derive(Deserialize, Serialize, Parser, TS)]
#[ts(export)]
pub struct InstalledVersionParams {
id: PackageId,
}
pub async fn installed_version(
ctx: RpcContext,
InstalledVersionParams { id }: InstalledVersionParams,
) -> Result<Option<VersionString>, Error> {
if let Some(pde) = ctx
.db
.peek()
.await
.into_public()
.into_package_data()
.into_idx(&id)
{
Ok(Some(
pde.into_state_info()
.as_manifest(ManifestPreference::Old)
.as_version()
.de()?,
))
} else {
Ok(None)
}
}
#[instrument(skip_all)]
pub async fn cli_install(
HandlerArgs {
@@ -368,7 +365,7 @@ pub async fn cli_install(
let method = parent_method.into_iter().chain(method).collect_vec();
match params {
CliInstallParams::Sideload(path) => {
let file = crate::s9pk::load(&ctx, path).await?;
let file = open_file(path).await?;
// rpc call remote sideload
let SideloadResponse { upload, progress } = from_value::<SideloadResponse>(
@@ -435,9 +432,70 @@ pub async fn cli_install(
progress?;
upload?;
}
CliInstallParams::Marketplace(params) => {
ctx.call_remote::<RpcContext>(&method.join("."), to_value(&params)?)
.await?;
CliInstallParams::Marketplace(QueryPackageParams { id, version }) => {
let source_version: Option<VersionString> = from_value(
ctx.call_remote::<RpcContext>("package.installed-version", json!({ "id": &id }))
.await?,
)?;
let mut packages: GetPackageResponse = from_value(
ctx.call_remote::<RegistryContext>(
"package.get",
json!({ "id": &id, "version": version, "sourceVersion": source_version }),
)
.await?,
)?;
let version = if packages.best.len() == 1 {
packages.best.pop_first().map(|(k, _)| k).unwrap()
} else {
println!("Multiple flavors of {id} found. Please select one of the following versions to install:");
let version;
loop {
let (mut read, mut output) = rustyline_async::Readline::new("> ".into())
.with_kind(ErrorKind::Filesystem)?;
for (idx, version) in packages.best.keys().enumerate() {
output
.write_all(format!(" {}) {}\n", idx + 1, version).as_bytes())
.await?;
read.add_history_entry(version.to_string());
}
if let ReadlineEvent::Line(line) = read.readline().await? {
let trimmed = line.trim();
match trimmed.parse() {
Ok(v) => {
if let Some((k, _)) = packages.best.remove_entry(&v) {
version = k;
break;
}
}
Err(_) => match trimmed.parse::<usize>() {
Ok(i) if (1..=packages.best.len()).contains(&i) => {
version = packages.best.keys().nth(i - 1).unwrap().clone();
break;
}
_ => (),
},
}
eprintln!("invalid selection: {trimmed}");
println!("Please select one of the following versions to install:");
} else {
return Err(Error::new(
eyre!("Could not determine precise version to install"),
ErrorKind::InvalidRequest,
)
.into());
}
}
version
};
ctx.call_remote::<RpcContext>(
&method.join("."),
to_value(&InstallParams {
id,
registry: ctx.registry_url.clone().or_not_found("--registry")?,
version,
})?,
)
.await?;
}
}
Ok(())

View File

@@ -1,4 +1,4 @@
pub const DEFAULT_MARKETPLACE: &str = "https://registry.start9.com";
pub const DEFAULT_REGISTRY: &str = "https://registry.start9.com";
// pub const COMMUNITY_MARKETPLACE: &str = "https://community-registry.start9.com";
pub const HOST_IP: [u8; 4] = [172, 18, 0, 1];
pub use std::env::consts::ARCH;
@@ -263,6 +263,12 @@ pub fn package<C: Context>() -> ParentHandler<C> {
.with_display_serializable()
.with_call_remote::<CliContext>(),
)
.subcommand(
"installed-version",
from_fn_async(install::installed_version)
.with_display_serializable()
.with_call_remote::<CliContext>(),
)
.subcommand("config", config::config::<C>())
.subcommand(
"start",

View File

@@ -12,7 +12,6 @@ use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{GenericRpcMethod, RpcRequest, RpcResponse};
use rustyline_async::{ReadlineEvent, SharedWriter};
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio::sync::Mutex;
@@ -30,6 +29,7 @@ use crate::disk::mount::util::unmount;
use crate::prelude::*;
use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::util::clap::FromStrParser;
use crate::util::io::open_file;
use crate::util::rpc_client::UnixRpcClient;
use crate::util::{new_guid, Invoke};
@@ -342,7 +342,7 @@ impl Drop for LxcContainer {
if let Err(e) = async {
let err_path = rootfs.path().join("var/log/containerRuntime.err");
if tokio::fs::metadata(&err_path).await.is_ok() {
let mut lines = BufReader::new(File::open(&err_path).await?).lines();
let mut lines = BufReader::new(open_file(&err_path).await?).lines();
while let Some(line) = lines.next_line().await? {
let container = &**guid;
tracing::error!(container, "{}", line);

View File

@@ -5,8 +5,6 @@ use models::{HostId, ServiceInterfaceId};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::net::host::address::HostAddress;
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]

View File

@@ -19,7 +19,6 @@ use new_mime_guess::MimeGuess;
use openssl::hash::MessageDigest;
use openssl::x509::X509;
use rpc_toolkit::{Context, HttpServer, Server};
use tokio::fs::File;
use tokio::io::BufReader;
use tokio_util::io::ReaderStream;
@@ -29,6 +28,7 @@ use crate::middleware::auth::{Auth, HasValidSession};
use crate::middleware::cors::Cors;
use crate::middleware::db::SyncDb;
use crate::rpc_continuations::{Guid, RpcContinuations};
use crate::util::io::open_file;
use crate::{
diagnostic_api, init_api, install_api, main_api, setup_api, Error, ErrorKind, ResultExt,
};
@@ -44,8 +44,6 @@ const EMBEDDED_UIS: Dir<'_> =
#[cfg(not(all(feature = "daemon", not(feature = "test"))))]
const EMBEDDED_UIS: Dir<'_> = Dir::new("", &[]);
const PROXY_STRIP_HEADERS: &[&str] = &["cookie", "host", "origin", "referer", "user-agent"];
#[derive(Clone)]
pub enum UiMode {
Setup,
@@ -340,9 +338,8 @@ impl FileData {
.any(|e| e == "gzip")
.then_some("gzip");
let file = File::open(path)
.await
.with_ctx(|_| (ErrorKind::Filesystem, path.display().to_string()))?;
let file = open_file(path)
.await?;
let metadata = file
.metadata()
.await

View File

@@ -74,6 +74,7 @@ pub async fn list(
.as_notifications()
.as_entries()?
.into_iter()
.rev()
.take(limit);
let notifs = records
.into_iter()
@@ -97,6 +98,7 @@ pub async fn list(
.as_entries()?
.into_iter()
.filter(|(id, _)| *id < before)
.rev()
.take(limit);
records
.into_iter()

View File

@@ -21,7 +21,7 @@ use crate::disk::OsPartitionInfo;
use crate::net::utils::find_eth_iface;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::util::io::TmpDir;
use crate::util::io::{open_file, TmpDir};
use crate::util::serde::IoFormat;
use crate::util::Invoke;
use crate::ARCH;
@@ -241,12 +241,10 @@ pub async fn execute<C: Context>(
tokio::fs::create_dir_all(&images_path).await?;
let image_path = images_path
.join(hex::encode(
&MultiCursorFile::from(
tokio::fs::File::open("/run/live/medium/live/filesystem.squashfs").await?,
)
.blake3_mmap()
.await?
.as_bytes()[..16],
&MultiCursorFile::from(open_file("/run/live/medium/live/filesystem.squashfs").await?)
.blake3_mmap()
.await?
.as_bytes()[..16],
))
.with_extension("rootfs");
tokio::fs::copy("/run/live/medium/live/filesystem.squashfs", &image_path).await?;

View File

@@ -1,4 +1,5 @@
use std::collections::HashMap;
use std::sync::Arc;
use reqwest::Client;
use serde::{Deserialize, Serialize};
@@ -7,10 +8,13 @@ use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey};
use crate::registry::signer::AcceptSigners;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::Section;
use crate::s9pk::S9pk;
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
@@ -52,3 +56,15 @@ impl<C: for<'a> Commitment<&'a HttpSource>> RegistryAsset<C> {
.await
}
}
impl RegistryAsset<MerkleArchiveCommitment> {
pub async fn deserialize_s9pk(
&self,
client: Client,
) -> Result<S9pk<Section<Arc<HttpSource>>>, Error> {
S9pk::deserialize(
&Arc::new(HttpSource::new(client, self.url.clone()).await?),
Some(&self.commitment),
)
.await
}
}

View File

@@ -4,7 +4,7 @@ use std::ops::Deref;
use axum::extract::Request;
use axum::response::Response;
use emver::{Version, VersionRange};
use exver::{Version, VersionRange};
use http::HeaderValue;
use imbl_value::InternedString;
use rpc_toolkit::{Middleware, RpcRequest, RpcResponse};

View File

@@ -1,5 +1,4 @@
use std::collections::{BTreeMap, BTreeSet};
use std::net::SocketAddr;
use axum::Router;
use futures::future::ready;

View File

@@ -3,7 +3,6 @@ use std::panic::UnwindSafe;
use std::path::PathBuf;
use clap::Parser;
use helpers::NonDetachingJoinHandle;
use imbl_value::InternedString;
use itertools::Itertools;
use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler};
@@ -13,7 +12,7 @@ use url::Url;
use crate::context::CliContext;
use crate::prelude::*;
use crate::progress::{FullProgressTracker, PhasedProgressBar};
use crate::progress::{FullProgressTracker};
use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
@@ -25,6 +24,7 @@ use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::open_file;
use crate::util::serde::Base64;
use crate::util::VersionString;
@@ -184,7 +184,7 @@ pub async fn cli_add_asset(
}
};
let file = MultiCursorFile::from(tokio::fs::File::open(&path).await?);
let file = MultiCursorFile::from(open_file(&path).await?);
let progress = FullProgressTracker::new();
let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(10));

View File

@@ -20,6 +20,7 @@ use crate::registry::os::SIG_CONTEXT;
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::registry::signer::commitment::Commitment;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::util::io::open_file;
use crate::util::VersionString;
pub fn get_api<C: Context>() -> ParentHandler<C> {
@@ -158,9 +159,7 @@ async fn cli_get_os_asset(
if let Some(mut reverify_phase) = reverify_phase {
reverify_phase.start();
res.commitment
.check(&MultiCursorFile::from(
tokio::fs::File::open(download).await?,
))
.check(&MultiCursorFile::from(open_file(download).await?))
.await?;
reverify_phase.complete();
}

View File

@@ -21,6 +21,7 @@ use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::open_file;
use crate::util::serde::Base64;
use crate::util::VersionString;
@@ -166,7 +167,7 @@ pub async fn cli_sign_asset(
}
};
let file = MultiCursorFile::from(tokio::fs::File::open(&path).await?);
let file = MultiCursorFile::from(open_file(&path).await?);
let progress = FullProgressTracker::new();
let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(10));

View File

@@ -1,6 +1,6 @@
use std::collections::{BTreeMap, BTreeSet};
use emver::VersionRange;
use exver::VersionRange;
use imbl_value::InternedString;
use serde::{Deserialize, Serialize};
use ts_rs::TS;

View File

@@ -2,7 +2,7 @@ use std::collections::BTreeMap;
use chrono::Utc;
use clap::Parser;
use emver::VersionRange;
use exver::VersionRange;
use itertools::Itertools;
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
@@ -148,10 +148,11 @@ pub async fn get_version(
if let (Some(pool), Some(server_id), Some(arch)) = (&ctx.pool, server_id, arch) {
let created_at = Utc::now();
query!("INSERT INTO user_activity (created_at, server_id, arch) VALUES ($1, $2, $3)",
created_at,
server_id,
arch
query!(
"INSERT INTO user_activity (created_at, server_id, arch) VALUES ($1, $2, $3)",
created_at,
server_id,
arch
)
.execute(pool)
.await?;

View File

@@ -1,7 +1,7 @@
use std::collections::{BTreeMap, BTreeSet};
use clap::{Parser, ValueEnum};
use emver::{Version, VersionRange};
use exver::{ExtendedVersion, VersionRange};
use imbl_value::InternedString;
use itertools::Itertools;
use models::PackageId;
@@ -45,8 +45,7 @@ pub struct GetPackageParams {
pub id: Option<PackageId>,
#[ts(type = "string | null")]
pub version: Option<VersionRange>,
#[ts(type = "string | null")]
pub source_version: Option<Version>,
pub source_version: Option<VersionString>,
#[ts(skip)]
#[arg(skip)]
#[serde(rename = "__device_info")]
@@ -132,7 +131,7 @@ fn get_matching_models<'a>(
device_info,
..
}: &GetPackageParams,
) -> Result<Vec<(PackageId, Version, &'a Model<PackageVersionInfo>)>, Error> {
) -> Result<Vec<(PackageId, ExtendedVersion, &'a Model<PackageVersionInfo>)>, Error> {
if let Some(id) = id {
if let Some(pkg) = db.as_packages().as_idx(id) {
vec![(id.clone(), pkg)]
@@ -166,7 +165,7 @@ fn get_matching_models<'a>(
.as_ref()
.map_or(Ok(true), |device_info| info.works_for_device(device_info))?
{
Some((k.clone(), Version::from(v), info))
Some((k.clone(), ExtendedVersion::from(v), info))
} else {
None
},

View File

@@ -1,6 +1,6 @@
use std::collections::{BTreeMap, BTreeSet};
use emver::{Version, VersionRange};
use exver::{Version, VersionRange};
use imbl_value::InternedString;
use models::{DataUrl, PackageId, VersionString};
use serde::{Deserialize, Serialize};
@@ -70,7 +70,8 @@ pub struct PackageVersionInfo {
pub support_site: Url,
#[ts(type = "string")]
pub marketing_site: Url,
pub os_version: VersionString,
#[ts(type = "string")]
pub os_version: Version,
pub hardware_requirements: HardwareRequirements,
#[ts(type = "string | null")]
pub source_version: Option<VersionRange>,

View File

@@ -4,7 +4,6 @@ use std::str::FromStr;
use ::ed25519::pkcs8::BitStringRef;
use clap::builder::ValueParserFactory;
use der::referenced::OwnedToRef;
use der::{Decode, Encode};
use pkcs8::der::AnyRef;
use pkcs8::{PrivateKeyInfo, SubjectPublicKeyInfo};
use serde::{Deserialize, Serialize};

View File

@@ -274,6 +274,21 @@ impl<S: FileSource + Clone> DirectoryContents<S> {
((_, a), (_, b), _) if !a.as_contents().is_dir() && b.as_contents().is_dir() => {
std::cmp::Ordering::Greater
}
((_, a), (_, b), _)
if a.as_contents().is_missing() && !b.as_contents().is_missing() =>
{
std::cmp::Ordering::Greater
}
((_, a), (_, b), _)
if !a.as_contents().is_missing() && b.as_contents().is_missing() =>
{
std::cmp::Ordering::Less
}
((n_a, a), (n_b, b), _)
if a.as_contents().is_missing() && b.as_contents().is_missing() =>
{
n_a.cmp(n_b)
}
((a, _), (b, _), Some(sort_by)) => sort_by(&***a, &***b),
_ => std::cmp::Ordering::Equal,
}) {

View File

@@ -121,14 +121,14 @@ impl<S: ArchiveSource + Clone> MerkleArchive<Section<S>> {
}
if max_size > *root_maxsize {
return Err(Error::new(
eyre!("merkle root directory max size too large"),
eyre!("root directory max size too large"),
ErrorKind::InvalidSignature,
));
}
} else {
if max_size > CAP_1_MiB as u64 {
return Err(Error::new(
eyre!("merkle root directory max size over 1MiB, cancelling download in case of DOS attack"),
eyre!("root directory max size over 1MiB, cancelling download in case of DOS attack"),
ErrorKind::InvalidSignature,
));
}
@@ -377,6 +377,9 @@ impl<S> EntryContents<S> {
pub fn is_dir(&self) -> bool {
matches!(self, &EntryContents::Directory(_))
}
pub fn is_missing(&self) -> bool {
matches!(self, &EntryContents::Missing)
}
}
impl<S: ArchiveSource + Clone> EntryContents<Section<S>> {
#[instrument(skip_all)]

View File

@@ -4,7 +4,7 @@ use std::sync::{Arc, Mutex};
use std::task::Poll;
use bytes::Bytes;
use futures::{Stream, StreamExt, TryStreamExt};
use futures::{Stream, TryStreamExt};
use reqwest::header::{ACCEPT_RANGES, CONTENT_LENGTH, RANGE};
use reqwest::{Client, Url};
use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take};
@@ -54,11 +54,12 @@ impl HttpSource {
}
}
impl ArchiveSource for HttpSource {
type Reader = HttpReader;
type FetchReader = HttpReader;
type FetchAllReader = StreamReader<BoxStream<'static, Result<Bytes, std::io::Error>>, Bytes>;
async fn size(&self) -> Option<u64> {
self.size
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
Ok(StreamReader::new(
self.client
.get(self.url.clone())
@@ -72,7 +73,7 @@ impl ArchiveSource for HttpSource {
.apply(boxed),
))
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
match &self.range_support {
Ok(_) => Ok(HttpReader::Range(
StreamReader::new(if size > 0 {

View File

@@ -10,6 +10,7 @@ use tokio::io::{AsyncRead, AsyncWrite};
use crate::prelude::*;
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::util::io::{open_file, TmpDir};
pub mod http;
pub mod multi_cursor_file;
@@ -159,7 +160,7 @@ impl FileSource for PathBuf {
Ok(tokio::fs::metadata(self).await?.len())
}
async fn reader(&self) -> Result<Self::Reader, Error> {
Ok(File::open(self).await?)
Ok(open_file(self).await?)
}
}
@@ -180,18 +181,17 @@ impl FileSource for Arc<[u8]> {
}
pub trait ArchiveSource: Send + Sync + Sized + 'static {
type Reader: AsyncRead + Unpin + Send;
type FetchReader: AsyncRead + Unpin + Send;
type FetchAllReader: AsyncRead + Unpin + Send;
fn size(&self) -> impl Future<Output = Option<u64>> + Send {
async { None }
}
fn fetch_all(
&self,
) -> impl Future<Output = Result<impl AsyncRead + Unpin + Send, Error>> + Send;
fn fetch_all(&self) -> impl Future<Output = Result<Self::FetchAllReader, Error>> + Send;
fn fetch(
&self,
position: u64,
size: u64,
) -> impl Future<Output = Result<Self::Reader, Error>> + Send;
) -> impl Future<Output = Result<Self::FetchReader, Error>> + Send;
fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
@@ -222,14 +222,15 @@ pub trait ArchiveSource: Send + Sync + Sized + 'static {
}
impl<T: ArchiveSource> ArchiveSource for Arc<T> {
type Reader = T::Reader;
type FetchReader = T::FetchReader;
type FetchAllReader = T::FetchAllReader;
async fn size(&self) -> Option<u64> {
self.deref().size().await
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
self.deref().fetch_all().await
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
self.deref().fetch(position, size).await
}
async fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
@@ -249,11 +250,12 @@ impl<T: ArchiveSource> ArchiveSource for Arc<T> {
}
impl ArchiveSource for Arc<[u8]> {
type Reader = tokio::io::Take<std::io::Cursor<Self>>;
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
type FetchReader = tokio::io::Take<std::io::Cursor<Self>>;
type FetchAllReader = std::io::Cursor<Self>;
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
Ok(std::io::Cursor::new(self.clone()))
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
use tokio::io::AsyncReadExt;
let mut cur = std::io::Cursor::new(self.clone());
@@ -269,7 +271,7 @@ pub struct Section<S> {
size: u64,
}
impl<S: ArchiveSource> FileSource for Section<S> {
type Reader = S::Reader;
type Reader = S::FetchReader;
async fn size(&self) -> Result<u64, Error> {
Ok(self.size)
}
@@ -285,3 +287,81 @@ pub type DynRead = Box<dyn AsyncRead + Unpin + Send + Sync + 'static>;
pub fn into_dyn_read<R: AsyncRead + Unpin + Send + Sync + 'static>(r: R) -> DynRead {
Box::new(r)
}
#[derive(Clone)]
pub struct TmpSource<S> {
tmp_dir: Arc<TmpDir>,
source: S,
}
impl<S> TmpSource<S> {
pub fn new(tmp_dir: Arc<TmpDir>, source: S) -> Self {
Self { tmp_dir, source }
}
pub async fn gc(self) -> Result<(), Error> {
self.tmp_dir.gc().await
}
}
impl<S> std::ops::Deref for TmpSource<S> {
type Target = S;
fn deref(&self) -> &Self::Target {
&self.source
}
}
impl<S: ArchiveSource> ArchiveSource for TmpSource<S> {
type FetchReader = <S as ArchiveSource>::FetchReader;
type FetchAllReader = <S as ArchiveSource>::FetchAllReader;
async fn size(&self) -> Option<u64> {
self.source.size().await
}
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
self.source.fetch_all().await
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
self.source.fetch(position, size).await
}
async fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
) -> Result<(), Error> {
self.source.copy_all_to(w).await
}
async fn copy_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
position: u64,
size: u64,
w: &mut W,
) -> Result<(), Error> {
self.source.copy_to(position, size, w).await
}
}
impl<S: FileSource> From<TmpSource<S>> for DynFileSource {
fn from(value: TmpSource<S>) -> Self {
DynFileSource::new(value)
}
}
impl<S: FileSource> FileSource for TmpSource<S> {
type Reader = <S as FileSource>::Reader;
async fn size(&self) -> Result<u64, Error> {
self.source.size().await
}
async fn reader(&self) -> Result<Self::Reader, Error> {
self.source.reader().await
}
async fn copy<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
mut w: &mut W,
) -> Result<(), Error> {
self.source.copy(&mut w).await
}
async fn copy_verify<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
mut w: &mut W,
verify: Option<(Hash, u64)>,
) -> Result<(), Error> {
self.source.copy_verify(&mut w, verify).await
}
async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result<Vec<u8>, Error> {
self.source.to_vec(verify).await
}
}

View File

@@ -6,12 +6,13 @@ use std::sync::Arc;
use std::task::Poll;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take};
use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, ReadBuf, Take};
use tokio::sync::{Mutex, OwnedMutexGuard};
use crate::disk::mount::filesystem::loop_dev::LoopDev;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::{ArchiveSource, Section};
use crate::util::io::open_file;
fn path_from_fd(fd: RawFd) -> Result<PathBuf, Error> {
#[cfg(target_os = "linux")]
@@ -42,7 +43,7 @@ impl MultiCursorFile {
path_from_fd(self.fd)
}
pub async fn open(fd: &impl AsRawFd) -> Result<Self, Error> {
let f = File::open(path_from_fd(fd.as_raw_fd())?).await?;
let f = open_file(path_from_fd(fd.as_raw_fd())?).await?;
Ok(Self::from(f))
}
pub async fn cursor(&self) -> Result<FileCursor, Error> {
@@ -50,7 +51,7 @@ impl MultiCursorFile {
if let Ok(file) = self.file.clone().try_lock_owned() {
file
} else {
Arc::new(Mutex::new(File::open(self.path()?).await?))
Arc::new(Mutex::new(open_file(self.path()?).await?))
.try_lock_owned()
.expect("freshly created")
},
@@ -88,24 +89,48 @@ impl AsyncRead for FileCursor {
Pin::new(&mut (&mut **this.0.get_mut())).poll_read(cx, buf)
}
}
impl AsyncSeek for FileCursor {
fn start_seek(self: Pin<&mut Self>, position: SeekFrom) -> std::io::Result<()> {
let this = self.project();
Pin::new(&mut (&mut **this.0.get_mut())).start_seek(position)
}
fn poll_complete(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<std::io::Result<u64>> {
let this = self.project();
Pin::new(&mut (&mut **this.0.get_mut())).poll_complete(cx)
}
}
impl std::ops::Deref for FileCursor {
type Target = File;
fn deref(&self) -> &Self::Target {
&*self.0
}
}
impl std::ops::DerefMut for FileCursor {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut *self.0
}
}
impl ArchiveSource for MultiCursorFile {
type Reader = Take<FileCursor>;
type FetchReader = Take<FileCursor>;
type FetchAllReader = FileCursor;
async fn size(&self) -> Option<u64> {
tokio::fs::metadata(self.path().ok()?)
.await
.ok()
.map(|m| m.len())
}
#[allow(refining_impl_trait)]
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send + 'static, Error> {
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
use tokio::io::AsyncSeekExt;
let mut file = self.cursor().await?;
file.0.seek(SeekFrom::Start(0)).await?;
Ok(file)
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
use tokio::io::AsyncSeekExt;
let mut file = self.cursor().await?;

View File

@@ -3,7 +3,7 @@ use tokio::io::{AsyncRead, AsyncWrite};
use crate::prelude::*;
/// Most-significant byte, == 0x80
/// Most-significant bit, == 0x80
pub const MSB: u8 = 0b1000_0000;
const MAX_STR_LEN: u64 = 1024 * 1024; // 1 MiB
@@ -39,22 +39,20 @@ pub async fn serialize_varstring<W: AsyncWrite + Unpin + Send>(
Ok(())
}
const MAX_SIZE: usize = (std::mem::size_of::<u64>() * 8 + 7) / 7;
#[derive(Default)]
struct VarIntProcessor {
buf: [u8; 10],
maxsize: usize,
buf: [u8; MAX_SIZE],
i: usize,
}
impl VarIntProcessor {
fn new() -> VarIntProcessor {
VarIntProcessor {
maxsize: (std::mem::size_of::<u64>() * 8 + 7) / 7,
..VarIntProcessor::default()
}
Self::default()
}
fn push(&mut self, b: u8) -> Result<(), Error> {
if self.i >= self.maxsize {
if self.i >= MAX_SIZE {
return Err(Error::new(
eyre!("Unterminated varint"),
ErrorKind::ParseS9pk,

View File

@@ -4,37 +4,57 @@ pub mod rpc;
pub mod v1;
pub mod v2;
use std::io::SeekFrom;
use std::path::Path;
use std::sync::Arc;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncSeekExt};
use tokio::io::{AsyncReadExt, AsyncSeek};
pub use v2::{manifest, S9pk};
use crate::context::CliContext;
use crate::prelude::*;
use crate::progress::FullProgressTracker;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource};
use crate::s9pk::v1::reader::S9pkReader;
use crate::s9pk::v2::compat::MAGIC_AND_VERSION;
use crate::util::io::TmpDir;
pub async fn load(ctx: &CliContext, path: impl AsRef<Path>) -> Result<File, Error> {
pub async fn load<S, K>(
source: S,
key: K,
progress: Option<&FullProgressTracker>,
) -> Result<S9pk<DynFileSource>, Error>
where
S: ArchiveSource,
S::FetchAllReader: AsyncSeek + Sync,
K: FnOnce() -> Result<ed25519_dalek::SigningKey, Error>,
{
// TODO: return s9pk
const MAGIC_LEN: usize = MAGIC_AND_VERSION.len();
let mut magic = [0_u8; MAGIC_LEN];
let mut file = tokio::fs::File::open(&path).await?;
file.read_exact(&mut magic).await?;
file.seek(SeekFrom::Start(0)).await?;
source.fetch(0, 3).await?.read_exact(&mut magic).await?;
if magic == v2::compat::MAGIC_AND_VERSION {
let phase = if let Some(progress) = progress {
let mut phase = progress.add_phase(
"Converting Package to V2".into(),
Some(source.size().await.unwrap_or(60)),
);
phase.start();
Some(phase)
} else {
None
};
tracing::info!("Converting package to v2 s9pk");
let new_path = path.as_ref().with_extension("compat.s9pk");
S9pk::from_v1(
S9pkReader::from_reader(file, true).await?,
&new_path,
ctx.developer_key()?.clone(),
let tmp_dir = TmpDir::new().await?;
let s9pk = S9pk::from_v1(
S9pkReader::from_reader(source.fetch_all().await?, true).await?,
Arc::new(tmp_dir),
key()?,
)
.await?;
tokio::fs::rename(&new_path, &path).await?;
file = tokio::fs::File::open(&path).await?;
tracing::info!("Converted s9pk successfully");
if let Some(mut phase) = phase {
phase.complete();
}
Ok(s9pk.into_dyn())
} else {
Ok(S9pk::deserialize(&Arc::new(source), None).await?.into_dyn())
}
Ok(file)
}

View File

@@ -1,19 +1,19 @@
use std::path::PathBuf;
use std::sync::Arc;
use clap::Parser;
use models::ImageId;
use rpc_toolkit::{from_fn_async, Empty, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use ts_rs::TS;
use crate::context::CliContext;
use crate::prelude::*;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::v2::pack::ImageConfig;
use crate::s9pk::v2::SIG_CONTEXT;
use crate::s9pk::S9pk;
use crate::util::io::TmpDir;
use crate::util::io::{create_file, open_file, TmpDir};
use crate::util::serde::{apply_expr, HandlerExtSerde};
pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"];
@@ -79,19 +79,25 @@ async fn add_image(
AddImageParams { id, config }: AddImageParams,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<(), Error> {
let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?)
.await?
.into_dyn();
let mut s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
s9pk.as_manifest_mut().images.insert(id, config);
let tmpdir = TmpDir::new().await?;
s9pk.load_images(&tmpdir).await?;
let tmp_dir = Arc::new(TmpDir::new().await?);
s9pk.load_images(tmp_dir.clone()).await?;
s9pk.validate_and_filter(None)?;
let tmp_path = s9pk_path.with_extension("s9pk.tmp");
let mut tmp_file = File::create(&tmp_path).await?;
let mut tmp_file = create_file(&tmp_path).await?;
s9pk.serialize(&mut tmp_file, true).await?;
drop(s9pk);
tmp_file.sync_all().await?;
tokio::fs::rename(&tmp_path, &s9pk_path).await?;
tmp_dir.gc().await?;
Ok(())
}
@@ -104,13 +110,18 @@ async fn edit_manifest(
EditManifestParams { expression }: EditManifestParams,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<Manifest, Error> {
let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?).await?;
let mut s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
let old = serde_json::to_value(s9pk.as_manifest()).with_kind(ErrorKind::Serialization)?;
*s9pk.as_manifest_mut() = serde_json::from_value(apply_expr(old.into(), &expression)?.into())
.with_kind(ErrorKind::Serialization)?;
let manifest = s9pk.as_manifest().clone();
let tmp_path = s9pk_path.with_extension("s9pk.tmp");
let mut tmp_file = File::create(&tmp_path).await?;
let mut tmp_file = create_file(&tmp_path).await?;
s9pk.as_archive_mut()
.set_signer(ctx.developer_key()?.clone(), SIG_CONTEXT);
s9pk.serialize(&mut tmp_file, true).await?;
@@ -123,9 +134,14 @@ async fn edit_manifest(
async fn file_tree(
ctx: CliContext,
_: Empty,
S9pkPath { s9pk }: S9pkPath,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<Vec<PathBuf>, Error> {
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
let s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
Ok(s9pk.as_archive().contents().file_paths(""))
}
@@ -138,11 +154,16 @@ struct CatParams {
async fn cat(
ctx: CliContext,
CatParams { file_path }: CatParams,
S9pkPath { s9pk }: S9pkPath,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<(), Error> {
use crate::s9pk::merkle_archive::source::FileSource;
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
let s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
tokio::io::copy(
&mut s9pk
.as_archive()
@@ -162,8 +183,13 @@ async fn cat(
async fn inspect_manifest(
ctx: CliContext,
_: Empty,
S9pkPath { s9pk }: S9pkPath,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<Manifest, Error> {
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
let s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
Ok(s9pk.as_manifest().clone())
}

View File

@@ -1,8 +1,7 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use emver::VersionRange;
use imbl_value::InOMap;
use exver::{Version, VersionRange};
use indexmap::IndexMap;
pub use models::PackageId;
use models::{ActionId, HealthCheckId, ImageId, VolumeId};
@@ -13,23 +12,16 @@ use crate::prelude::*;
use crate::s9pk::git_hash::GitHash;
use crate::s9pk::manifest::{Alerts, Description, HardwareRequirements};
use crate::util::serde::{Duration, IoFormat};
use crate::util::VersionString;
use crate::version::{Current, VersionT};
fn current_version() -> VersionString {
Current::new().semver().into()
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Manifest {
#[serde(default = "current_version")]
pub eos_version: VersionString,
pub eos_version: Version,
pub id: PackageId,
#[serde(default)]
pub git_hash: Option<GitHash>,
pub title: String,
pub version: VersionString,
pub version: exver::emver::Version,
pub description: Description,
#[serde(default)]
pub assets: Assets,

View File

@@ -20,6 +20,7 @@ use super::header::{FileSection, Header, TableOfContents};
use super::SIG_CONTEXT;
use crate::prelude::*;
use crate::s9pk::v1::docker::DockerReader;
use crate::util::io::open_file;
use crate::util::VersionString;
#[pin_project::pin_project]
@@ -150,9 +151,7 @@ pub struct S9pkReader<R: AsyncRead + AsyncSeek + Unpin + Send + Sync = BufReader
impl S9pkReader {
pub async fn open<P: AsRef<Path>>(path: P, check_sig: bool) -> Result<Self, Error> {
let p = path.as_ref();
let rdr = File::open(p)
.await
.with_ctx(|_| (crate::error::ErrorKind::Filesystem, p.display().to_string()))?;
let rdr = open_file(p).await?;
Self::from_reader(BufReader::new(rdr), check_sig).await
}

View File

@@ -2,9 +2,8 @@ use std::collections::BTreeMap;
use std::path::Path;
use std::sync::Arc;
use itertools::Itertools;
use exver::ExtendedVersion;
use models::ImageId;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncSeek, AsyncWriteExt};
use tokio::process::Command;
@@ -12,29 +11,35 @@ use crate::dependencies::{DepInfo, Dependencies};
use crate::prelude::*;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::Section;
use crate::s9pk::merkle_archive::source::TmpSource;
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::rpc::SKIP_ENV;
use crate::s9pk::v1::manifest::{Manifest as ManifestV1, PackageProcedure};
use crate::s9pk::v1::reader::S9pkReader;
use crate::s9pk::v2::pack::{PackSource, CONTAINER_TOOL};
use crate::s9pk::v2::pack::{ImageSource, PackSource, CONTAINER_TOOL};
use crate::s9pk::v2::{S9pk, SIG_CONTEXT};
use crate::util::io::TmpDir;
use crate::util::io::{create_file, TmpDir};
use crate::util::Invoke;
pub const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x01];
impl S9pk<Section<MultiCursorFile>> {
impl S9pk<TmpSource<PackSource>> {
#[instrument(skip_all)]
pub async fn from_v1<R: AsyncRead + AsyncSeek + Unpin + Send + Sync>(
mut reader: S9pkReader<R>,
destination: impl AsRef<Path>,
tmp_dir: Arc<TmpDir>,
signer: ed25519_dalek::SigningKey,
) -> Result<Self, Error> {
let scratch_dir = TmpDir::new().await?;
Command::new(CONTAINER_TOOL)
.arg("run")
.arg("--rm")
.arg("--privileged")
.arg("tonistiigi/binfmt")
.arg("--install")
.arg("all")
.invoke(ErrorKind::Docker)
.await?;
let mut archive = DirectoryContents::<PackSource>::new();
let mut archive = DirectoryContents::<TmpSource<PackSource>>::new();
// manifest.json
let manifest_raw = reader.manifest().await?;
@@ -56,33 +61,35 @@ impl S9pk<Section<MultiCursorFile>> {
let license: Arc<[u8]> = reader.license().await?.to_vec().await?.into();
archive.insert_path(
"LICENSE.md",
Entry::file(PackSource::Buffered(license.into())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(license.into()),
)),
)?;
// instructions.md
let instructions: Arc<[u8]> = reader.instructions().await?.to_vec().await?.into();
archive.insert_path(
"instructions.md",
Entry::file(PackSource::Buffered(instructions.into())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(instructions.into()),
)),
)?;
// icon.md
let icon: Arc<[u8]> = reader.icon().await?.to_vec().await?.into();
archive.insert_path(
format!("icon.{}", manifest.assets.icon_type()),
Entry::file(PackSource::Buffered(icon.into())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(icon.into()),
)),
)?;
// images
for arch in reader.docker_arches().await? {
let images_dir = scratch_dir.join("images").join(&arch);
let docker_platform = if arch == "x86_64" {
"--platform=linux/amd64".to_owned()
} else if arch == "aarch64" {
"--platform=linux/arm64".to_owned()
} else {
format!("--platform=linux/{arch}")
};
let images_dir = tmp_dir.join("images").join(&arch);
tokio::fs::create_dir_all(&images_dir).await?;
Command::new(CONTAINER_TOOL)
.arg("load")
@@ -93,97 +100,24 @@ impl S9pk<Section<MultiCursorFile>> {
let mut image_config = new_manifest.images.remove(image).unwrap_or_default();
image_config.arch.insert(arch.as_str().into());
new_manifest.images.insert(image.clone(), image_config);
let sqfs_path = images_dir.join(image).with_extension("squashfs");
let image_name = if *system {
format!("start9/{}:latest", image)
} else {
format!("start9/{}/{}:{}", manifest.id, image, manifest.version)
};
let id = String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("create")
.arg(&docker_platform)
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?,
)?;
let env = String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("run")
.arg("--rm")
.arg(&docker_platform)
.arg("--entrypoint")
.arg("env")
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?,
)?
.lines()
.filter(|l| {
l.trim()
.split_once("=")
.map_or(false, |(v, _)| !SKIP_ENV.contains(&v))
})
.join("\n")
+ "\n";
let workdir = Path::new(
String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("run")
.arg("--rm")
.arg(&docker_platform)
.arg("--entrypoint")
.arg("pwd")
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?,
)?
.trim(),
)
.to_owned();
Command::new("bash")
.arg("-c")
.arg(format!(
"{CONTAINER_TOOL} export {id} | mksquashfs - {sqfs} -tar",
id = id.trim(),
sqfs = sqfs_path.display()
))
.invoke(ErrorKind::Docker)
ImageSource::DockerTag(image_name.clone())
.load(
tmp_dir.clone(),
&new_manifest.id,
&new_manifest.version,
image,
&arch,
&mut archive,
)
.await?;
Command::new(CONTAINER_TOOL)
.arg("rm")
.arg(id.trim())
.invoke(ErrorKind::Docker)
.await?;
archive.insert_path(
Path::new("images")
.join(&arch)
.join(&image)
.with_extension("squashfs"),
Entry::file(PackSource::File(sqfs_path)),
)?;
archive.insert_path(
Path::new("images")
.join(&arch)
.join(&image)
.with_extension("env"),
Entry::file(PackSource::Buffered(Vec::from(env).into())),
)?;
archive.insert_path(
Path::new("images")
.join(&arch)
.join(&image)
.with_extension("json"),
Entry::file(PackSource::Buffered(
serde_json::to_vec(&serde_json::json!({
"workdir": workdir
}))
.with_kind(ErrorKind::Serialization)?
.into(),
)),
)?;
Command::new(CONTAINER_TOOL)
.arg("rmi")
.arg("-f")
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?;
@@ -191,7 +125,7 @@ impl S9pk<Section<MultiCursorFile>> {
}
// assets
let asset_dir = scratch_dir.join("assets");
let asset_dir = tmp_dir.join("assets");
tokio::fs::create_dir_all(&asset_dir).await?;
tokio_tar::Archive::new(reader.assets().await?)
.unpack(&asset_dir)
@@ -212,21 +146,21 @@ impl S9pk<Section<MultiCursorFile>> {
Path::new("assets")
.join(&asset_id)
.with_extension("squashfs"),
Entry::file(PackSource::File(sqfs_path)),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(sqfs_path))),
)?;
}
// javascript
let js_dir = scratch_dir.join("javascript");
let js_dir = tmp_dir.join("javascript");
let sqfs_path = js_dir.with_extension("squashfs");
tokio::fs::create_dir_all(&js_dir).await?;
if let Some(mut scripts) = reader.scripts().await? {
let mut js_file = File::create(js_dir.join("embassy.js")).await?;
let mut js_file = create_file(js_dir.join("embassy.js")).await?;
tokio::io::copy(&mut scripts, &mut js_file).await?;
js_file.sync_all().await?;
}
{
let mut js_file = File::create(js_dir.join("embassyManifest.json")).await?;
let mut js_file = create_file(js_dir.join("embassyManifest.json")).await?;
js_file
.write_all(&serde_json::to_vec(&manifest_raw).with_kind(ErrorKind::Serialization)?)
.await?;
@@ -239,30 +173,24 @@ impl S9pk<Section<MultiCursorFile>> {
.await?;
archive.insert_path(
Path::new("javascript.squashfs"),
Entry::file(PackSource::File(sqfs_path)),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(sqfs_path))),
)?;
archive.insert_path(
"manifest.json",
Entry::file(PackSource::Buffered(
serde_json::to_vec::<Manifest>(&new_manifest)
.with_kind(ErrorKind::Serialization)?
.into(),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(
serde_json::to_vec::<Manifest>(&new_manifest)
.with_kind(ErrorKind::Serialization)?
.into(),
),
)),
)?;
let mut s9pk = S9pk::new(MerkleArchive::new(archive, signer, SIG_CONTEXT), None).await?;
let mut dest_file = File::create(destination.as_ref()).await?;
s9pk.serialize(&mut dest_file, false).await?;
dest_file.sync_all().await?;
scratch_dir.delete().await?;
Ok(S9pk::deserialize(
&MultiCursorFile::from(File::open(destination.as_ref()).await?),
None,
)
.await?)
let mut res = S9pk::new(MerkleArchive::new(archive, signer, SIG_CONTEXT), None).await?;
res.as_archive_mut().update_hashes(true).await?;
Ok(res)
}
}
@@ -272,7 +200,7 @@ impl From<ManifestV1> for Manifest {
Self {
id: value.id,
title: value.title,
version: value.version,
version: ExtendedVersion::from(value.version).into(),
release_notes: value.release_notes,
license: value.license.into(),
wrapper_repo: value.wrapper_repo,

View File

@@ -2,6 +2,7 @@ use std::collections::{BTreeMap, BTreeSet};
use std::path::Path;
use color_eyre::eyre::eyre;
use exver::Version;
use helpers::const_true;
use imbl_value::InternedString;
pub use models::PackageId;
@@ -20,8 +21,8 @@ use crate::util::serde::Regex;
use crate::util::VersionString;
use crate::version::{Current, VersionT};
fn current_version() -> VersionString {
Current::new().semver().into()
fn current_version() -> Version {
Current::new().semver()
}
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)]
@@ -59,7 +60,8 @@ pub struct Manifest {
#[ts(type = "string | null")]
pub git_hash: Option<GitHash>,
#[serde(default = "current_version")]
pub os_version: VersionString,
#[ts(type = "string")]
pub os_version: Version,
#[serde(default = "const_true")]
pub has_config: bool,
}

View File

@@ -12,10 +12,12 @@ use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::file_contents::FileContents;
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
use crate::s9pk::merkle_archive::source::{
ArchiveSource, DynFileSource, FileSource, Section, TmpSource,
};
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::v2::pack::{ImageSource, PackSource};
use crate::util::io::TmpDir;
use crate::util::io::{open_file, TmpDir};
const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x02];
@@ -165,8 +167,8 @@ impl<S: FileSource + Clone> S9pk<S> {
}
}
impl<S: From<PackSource> + FileSource + Clone> S9pk<S> {
pub async fn load_images(&mut self, tmpdir: &TmpDir) -> Result<(), Error> {
impl<S: From<TmpSource<PackSource>> + FileSource + Clone> S9pk<S> {
pub async fn load_images(&mut self, tmp_dir: Arc<TmpDir>) -> Result<(), Error> {
let id = &self.manifest.id;
let version = &self.manifest.version;
for (image_id, image_config) in &mut self.manifest.images {
@@ -175,7 +177,7 @@ impl<S: From<PackSource> + FileSource + Clone> S9pk<S> {
image_config
.source
.load(
tmpdir,
tmp_dir.clone(),
id,
version,
image_id,
@@ -206,7 +208,7 @@ impl<S: ArchiveSource + Clone> S9pk<Section<S>> {
)
.await?;
let mut magic_version = [0u8; 3];
let mut magic_version = [0u8; MAGIC_AND_VERSION.len()];
header.read_exact(&mut magic_version).await?;
ensure_code!(
&magic_version == MAGIC_AND_VERSION,
@@ -232,7 +234,7 @@ impl S9pk {
Self::deserialize(&MultiCursorFile::from(file), None).await
}
pub async fn open(path: impl AsRef<Path>, id: Option<&PackageId>) -> Result<Self, Error> {
let res = Self::from_file(tokio::fs::File::open(path).await?).await?;
let res = Self::from_file(open_file(path).await?).await?;
if let Some(id) = id {
ensure_code!(
&res.as_manifest().id == id,

View File

@@ -10,7 +10,6 @@ use futures::{FutureExt, TryStreamExt};
use imbl_value::InternedString;
use models::{ImageId, PackageId, VersionString};
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::AsyncRead;
use tokio::process::Command;
use tokio::sync::OnceCell;
@@ -23,12 +22,12 @@ use crate::rpc_continuations::Guid;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{
into_dyn_read, ArchiveSource, DynFileSource, FileSource,
into_dyn_read, ArchiveSource, DynFileSource, FileSource, TmpSource,
};
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::v2::SIG_CONTEXT;
use crate::s9pk::S9pk;
use crate::util::io::TmpDir;
use crate::util::io::{create_file, open_file, TmpDir};
use crate::util::Invoke;
#[cfg(not(feature = "docker"))]
@@ -64,7 +63,7 @@ impl SqfsDir {
.invoke(ErrorKind::Filesystem)
.await?;
Ok(MultiCursorFile::from(
File::open(&path)
open_file(&path)
.await
.with_ctx(|_| (ErrorKind::Filesystem, path.display()))?,
))
@@ -100,11 +99,7 @@ impl FileSource for PackSource {
async fn reader(&self) -> Result<Self::Reader, Error> {
match self {
Self::Buffered(a) => Ok(into_dyn_read(Cursor::new(a.clone()))),
Self::File(f) => Ok(into_dyn_read(
File::open(f)
.await
.with_ctx(|_| (ErrorKind::Filesystem, f.display()))?,
)),
Self::File(f) => Ok(into_dyn_read(open_file(f).await?)),
Self::Squashfs(dir) => dir.file().await?.fetch_all().await.map(into_dyn_read),
}
}
@@ -284,9 +279,9 @@ pub enum ImageSource {
}
impl ImageSource {
#[instrument(skip_all)]
pub fn load<'a, S: From<PackSource> + FileSource + Clone>(
pub fn load<'a, S: From<TmpSource<PackSource>> + FileSource + Clone>(
&'a self,
tmpdir: &'a TmpDir,
tmp_dir: Arc<TmpDir>,
id: &'a PackageId,
version: &'a VersionString,
image_id: &'a ImageId,
@@ -331,12 +326,13 @@ impl ImageSource {
.arg(&tag)
.arg(&docker_platform)
.arg("-o")
.arg("type=image")
.arg("type=docker,dest=-")
.capture(false)
.pipe(Command::new(CONTAINER_TOOL).arg("load"))
.invoke(ErrorKind::Docker)
.await?;
ImageSource::DockerTag(tag.clone())
.load(tmpdir, id, version, image_id, arch, into)
.load(tmp_dir, id, version, image_id, arch, into)
.await?;
Command::new(CONTAINER_TOOL)
.arg("rmi")
@@ -390,21 +386,24 @@ impl ImageSource {
into.insert_path(
base_path.with_extension("json"),
Entry::file(
PackSource::Buffered(
serde_json::to_vec(&ImageMetadata {
workdir: if config.working_dir == Path::new("") {
"/".into()
} else {
config.working_dir
},
user: if config.user.is_empty() {
"root".into()
} else {
config.user.into()
},
})
.with_kind(ErrorKind::Serialization)?
.into(),
TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(
serde_json::to_vec(&ImageMetadata {
workdir: if config.working_dir == Path::new("") {
"/".into()
} else {
config.working_dir
},
user: if config.user.is_empty() {
"root".into()
} else {
config.user.into()
},
})
.with_kind(ErrorKind::Serialization)?
.into(),
),
)
.into(),
),
@@ -412,10 +411,16 @@ impl ImageSource {
into.insert_path(
base_path.with_extension("env"),
Entry::file(
PackSource::Buffered(config.env.join("\n").into_bytes().into()).into(),
TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(config.env.join("\n").into_bytes().into()),
)
.into(),
),
)?;
let dest = tmpdir.join(Guid::new().as_ref()).with_extension("squashfs");
let dest = tmp_dir
.join(Guid::new().as_ref())
.with_extension("squashfs");
let container = String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("create")
@@ -438,7 +443,7 @@ impl ImageSource {
.await?;
into.insert_path(
base_path.with_extension("squashfs"),
Entry::file(PackSource::File(dest).into()),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(dest)).into()),
)?;
Ok(())
@@ -460,8 +465,8 @@ pub struct ImageMetadata {
#[instrument(skip_all)]
pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
let tmpdir = Arc::new(TmpDir::new().await?);
let mut files = DirectoryContents::<PackSource>::new();
let tmp_dir = Arc::new(TmpDir::new().await?);
let mut files = DirectoryContents::<TmpSource<PackSource>>::new();
let js_dir = params.javascript();
let manifest: Arc<[u8]> = Command::new("node")
.arg("-e")
@@ -474,7 +479,10 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
.into();
files.insert(
"manifest.json".into(),
Entry::file(PackSource::Buffered(manifest.clone())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(manifest.clone()),
)),
);
let icon = params.icon().await?;
let icon_ext = icon
@@ -483,22 +491,28 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
.to_string_lossy();
files.insert(
InternedString::from_display(&lazy_format!("icon.{}", icon_ext)),
Entry::file(PackSource::File(icon)),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(icon))),
);
files.insert(
"LICENSE.md".into(),
Entry::file(PackSource::File(params.license())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::File(params.license()),
)),
);
files.insert(
"instructions.md".into(),
Entry::file(PackSource::File(params.instructions())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::File(params.instructions()),
)),
);
files.insert(
"javascript.squashfs".into(),
Entry::file(PackSource::Squashfs(Arc::new(SqfsDir::new(
js_dir,
tmpdir.clone(),
)))),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Squashfs(Arc::new(SqfsDir::new(js_dir, tmp_dir.clone()))),
)),
);
let mut s9pk = S9pk::new(
@@ -511,26 +525,29 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
for assets in s9pk.as_manifest().assets.clone() {
s9pk.as_archive_mut().contents_mut().insert_path(
Path::new("assets").join(&assets).with_extension("squashfs"),
Entry::file(PackSource::Squashfs(Arc::new(SqfsDir::new(
assets_dir.join(&assets),
tmpdir.clone(),
)))),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Squashfs(Arc::new(SqfsDir::new(
assets_dir.join(&assets),
tmp_dir.clone(),
))),
)),
)?;
}
s9pk.load_images(&*tmpdir).await?;
s9pk.load_images(tmp_dir.clone()).await?;
s9pk.validate_and_filter(None)?;
s9pk.serialize(
&mut File::create(params.output(&s9pk.as_manifest().id)).await?,
&mut create_file(params.output(&s9pk.as_manifest().id)).await?,
false,
)
.await?;
drop(s9pk);
tmpdir.gc().await?;
tmp_dir.gc().await?;
Ok(())
}

View File

@@ -11,7 +11,6 @@ use persistent_container::PersistentContainer;
use rpc_toolkit::{from_fn_async, CallRemoteHandler, Empty, HandlerArgs, HandlerFor};
use serde::{Deserialize, Serialize};
use start_stop::StartStop;
use tokio::fs::File;
use tokio::sync::Notify;
use ts_rs::TS;
@@ -33,6 +32,7 @@ use crate::status::MainStatus;
use crate::util::actor::background::BackgroundJobQueue;
use crate::util::actor::concurrent::ConcurrentActor;
use crate::util::actor::Actor;
use crate::util::io::create_file;
use crate::util::serde::Pem;
use crate::volume::data_dir;
@@ -403,7 +403,7 @@ impl Service {
#[instrument(skip_all)]
pub async fn backup(&self, guard: impl GenericMountGuard) -> Result<(), Error> {
let id = &self.seed.id;
let mut file = File::create(guard.path().join(id).with_extension("s9pk")).await?;
let mut file = create_file(guard.path().join(id).with_extension("s9pk")).await?;
self.seed
.persistent_container
.s9pk

View File

@@ -9,14 +9,12 @@ use helpers::NonDetachingJoinHandle;
use models::{ImageId, ProcedureName, VolumeId};
use rpc_toolkit::{Empty, Server, ShutdownHandle};
use serde::de::DeserializeOwned;
use tokio::fs::File;
use tokio::process::Command;
use tokio::sync::{oneshot, watch, Mutex, OnceCell};
use tracing::instrument;
use super::service_effect_handler::{service_effect_handler, EffectContext};
use super::transition::{TransitionKind, TransitionState};
use super::ServiceActorSeed;
use crate::context::RpcContext;
use crate::disk::mount::filesystem::bind::Bind;
use crate::disk::mount::filesystem::idmapped::IdMapped;
@@ -32,6 +30,7 @@ use crate::s9pk::merkle_archive::source::FileSource;
use crate::s9pk::S9pk;
use crate::service::start_stop::StartStop;
use crate::service::{rpc, RunningStatus, Service};
use crate::util::io::create_file;
use crate::util::rpc_client::UnixRpcClient;
use crate::util::Invoke;
use crate::volume::{asset_dir, data_dir};
@@ -237,7 +236,7 @@ impl PersistentContainer {
.get_path(Path::new("images").join(arch).join(&env_filename))
.and_then(|e| e.as_file())
{
env.copy(&mut File::create(image_path.join(&env_filename)).await?)
env.copy(&mut create_file(image_path.join(&env_filename)).await?)
.await?;
}
let json_filename = Path::new(image.as_ref()).with_extension("json");
@@ -247,7 +246,7 @@ impl PersistentContainer {
.get_path(Path::new("images").join(arch).join(&json_filename))
.and_then(|e| e.as_file())
{
json.copy(&mut File::create(image_path.join(&json_filename)).await?)
json.copy(&mut create_file(image_path.join(&json_filename)).await?)
.await?;
}
}

View File

@@ -7,8 +7,8 @@ use std::str::FromStr;
use std::sync::{Arc, Weak};
use clap::builder::ValueParserFactory;
use clap::{CommandFactory, FromArgMatches, Parser};
use emver::VersionRange;
use clap::Parser;
use exver::VersionRange;
use imbl_value::json;
use itertools::Itertools;
use models::{
@@ -1383,7 +1383,7 @@ struct CheckDependenciesResult {
is_running: bool,
health_checks: Vec<HealthCheckResult>,
#[ts(type = "string | null")]
version: Option<emver::Version>,
version: Option<exver::ExtendedVersion>,
}
async fn check_dependencies(

View File

@@ -94,12 +94,19 @@ impl ServiceMap {
}
#[instrument(skip_all)]
pub async fn install<S: FileSource + Clone>(
pub async fn install<F, Fut, S: FileSource + Clone>(
&self,
ctx: RpcContext,
mut s9pk: S9pk<S>,
s9pk: F,
recovery_source: Option<impl GenericMountGuard>,
) -> Result<DownloadInstallFuture, Error> {
progress: Option<FullProgressTracker>,
) -> Result<DownloadInstallFuture, Error>
where
F: FnOnce() -> Fut,
Fut: Future<Output = Result<S9pk<S>, Error>>,
S: FileSource + Clone,
{
let mut s9pk = s9pk().await?;
s9pk.validate_and_filter(ctx.s9pk_arch)?;
let manifest = s9pk.as_manifest().clone();
let id = manifest.id.clone();
@@ -118,7 +125,7 @@ impl ServiceMap {
};
let size = s9pk.size();
let progress = FullProgressTracker::new();
let progress = progress.unwrap_or_else(|| FullProgressTracker::new());
let download_progress_contribution = size.unwrap_or(60);
let mut download_progress = progress.add_phase(
InternedString::intern("Download"),

View File

@@ -8,7 +8,6 @@ use patch_db::json_ptr::ROOT;
use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use tokio::try_join;
use tracing::instrument;
@@ -35,7 +34,7 @@ use crate::prelude::*;
use crate::progress::{FullProgress, PhaseProgressTrackerHandle};
use crate::rpc_continuations::Guid;
use crate::util::crypto::EncryptedWire;
use crate::util::io::{dir_copy, dir_size, Counter};
use crate::util::io::{create_file, dir_copy, dir_size, Counter};
use crate::{Error, ErrorKind, ResultExt};
pub fn setup<C: Context>() -> ParentHandler<C> {
@@ -324,7 +323,7 @@ pub async fn execute(
pub async fn complete(ctx: SetupContext) -> Result<SetupResult, Error> {
match ctx.result.get() {
Some(Ok((res, ctx))) => {
let mut guid_file = File::create("/media/startos/config/disk.guid").await?;
let mut guid_file = create_file("/media/startos/config/disk.guid").await?;
guid_file.write_all(ctx.disk_guid.as_bytes()).await?;
guid_file.sync_all().await?;
Ok(res.clone())

View File

@@ -13,6 +13,7 @@ use ts_rs::TS;
use crate::context::{CliContext, RpcContext};
use crate::prelude::*;
use crate::util::clap::FromStrParser;
use crate::util::io::create_file;
use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat};
pub const SSH_AUTHORIZED_KEYS_FILE: &str = "/home/start9/.ssh/authorized_keys";
@@ -229,7 +230,7 @@ pub async fn sync_keys<P: AsRef<Path>>(keys: &SshKeys, dest: P) -> Result<(), Er
if tokio::fs::metadata(ssh_dir).await.is_err() {
tokio::fs::create_dir_all(ssh_dir).await?;
}
let mut f = tokio::fs::File::create(dest).await?;
let mut f = create_file(dest).await?;
for key in keys.0.values() {
f.write_all(key.0.to_key_format().as_bytes()).await?;
f.write_all(b"\n").await?;

View File

@@ -20,6 +20,7 @@ use crate::prelude::*;
use crate::rpc_continuations::RpcContinuations;
use crate::shutdown::Shutdown;
use crate::util::cpupower::{get_available_governors, set_governor, Governor};
use crate::util::io::open_file;
use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat};
use crate::util::Invoke;
@@ -657,7 +658,7 @@ impl ProcStat {
async fn get_proc_stat() -> Result<ProcStat, Error> {
use tokio::io::AsyncBufReadExt;
let mut cpu_line = String::new();
let _n = tokio::io::BufReader::new(tokio::fs::File::open("/proc/stat").await?)
let _n = tokio::io::BufReader::new(open_file("/proc/stat").await?)
.read_line(&mut cpu_line)
.await?;
let stats: Vec<u64> = cpu_line

View File

@@ -4,8 +4,8 @@ use std::time::Duration;
use clap::{ArgAction, Parser};
use color_eyre::eyre::{eyre, Result};
use emver::{Version, VersionRange};
use futures::{FutureExt, TryStreamExt};
use exver::{Version, VersionRange};
use futures::TryStreamExt;
use helpers::{AtomicFile, NonDetachingJoinHandle};
use imbl_value::json;
use itertools::Itertools;

View File

@@ -1,3 +1,4 @@
use std::io::SeekFrom;
use std::pin::Pin;
use std::sync::Arc;
use std::task::Poll;
@@ -5,20 +6,20 @@ use std::time::Duration;
use axum::body::Body;
use axum::response::Response;
use futures::StreamExt;
use futures::{ready, FutureExt, StreamExt};
use http::header::CONTENT_LENGTH;
use http::StatusCode;
use imbl_value::InternedString;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
use tokio::io::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt};
use tokio::sync::watch;
use crate::context::RpcContext;
use crate::prelude::*;
use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::multi_cursor_file::{FileCursor, MultiCursorFile};
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::TmpDir;
use crate::util::io::{create_file, TmpDir};
pub async fn upload(
ctx: &RpcContext,
@@ -215,14 +216,15 @@ impl UploadingFile {
pub async fn new() -> Result<(UploadHandle, Self), Error> {
let progress = watch::channel(Progress::default());
let tmp_dir = Arc::new(TmpDir::new().await?);
let file = File::create(tmp_dir.join("upload.tmp")).await?;
let file = create_file(tmp_dir.join("upload.tmp")).await?;
let uploading = Self {
tmp_dir,
tmp_dir: tmp_dir.clone(),
file: MultiCursorFile::open(&file).await?,
progress: progress.1,
};
Ok((
UploadHandle {
tmp_dir,
file,
progress: progress.0,
},
@@ -237,22 +239,127 @@ impl UploadingFile {
}
}
impl ArchiveSource for UploadingFile {
type Reader = <MultiCursorFile as ArchiveSource>::Reader;
type FetchReader = <MultiCursorFile as ArchiveSource>::FetchReader;
type FetchAllReader = UploadingFileReader;
async fn size(&self) -> Option<u64> {
Progress::expected_size(&mut self.progress.clone()).await
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
Progress::ready(&mut self.progress.clone()).await?;
self.file.fetch_all().await
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
let mut file = self.file.cursor().await?;
file.seek(SeekFrom::Start(0)).await?;
Ok(UploadingFileReader {
tmp_dir: self.tmp_dir.clone(),
file,
position: 0,
to_seek: None,
progress: self.progress.clone(),
})
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
Progress::ready_for(&mut self.progress.clone(), position + size).await?;
self.file.fetch(position, size).await
}
}
#[pin_project::pin_project(project = UploadingFileReaderProjection)]
pub struct UploadingFileReader {
tmp_dir: Arc<TmpDir>,
position: u64,
to_seek: Option<SeekFrom>,
#[pin]
file: FileCursor,
progress: watch::Receiver<Progress>,
}
impl<'a> UploadingFileReaderProjection<'a> {
fn poll_ready(&mut self, cx: &mut std::task::Context<'_>) -> Result<bool, std::io::Error> {
let ready = Progress::ready(&mut *self.progress);
tokio::pin!(ready);
Ok(ready
.poll_unpin(cx)
.map_err(|e| std::io::Error::other(e.source))?
.is_ready())
}
fn poll_ready_for(
&mut self,
cx: &mut std::task::Context<'_>,
size: u64,
) -> Result<bool, std::io::Error> {
let ready = Progress::ready_for(&mut *self.progress, size);
tokio::pin!(ready);
Ok(ready
.poll_unpin(cx)
.map_err(|e| std::io::Error::other(e.source))?
.is_ready())
}
}
impl AsyncRead for UploadingFileReader {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &mut tokio::io::ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
let mut this = self.project();
let position = *this.position;
if this.poll_ready(cx)? || this.poll_ready_for(cx, position + buf.remaining() as u64)? {
let start = buf.filled().len();
let res = this.file.poll_read(cx, buf);
*this.position += (buf.filled().len() - start) as u64;
res
} else {
Poll::Pending
}
}
}
impl AsyncSeek for UploadingFileReader {
fn start_seek(self: Pin<&mut Self>, position: SeekFrom) -> std::io::Result<()> {
let this = self.project();
*this.to_seek = Some(position);
Ok(())
}
fn poll_complete(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<std::io::Result<u64>> {
let mut this = self.project();
if let Some(to_seek) = *this.to_seek {
let size = match to_seek {
SeekFrom::Current(n) => (*this.position as i64 + n) as u64,
SeekFrom::Start(n) => n,
SeekFrom::End(n) => {
let expected_size = this.progress.borrow().expected_size;
match expected_size {
Some(end) => (end as i64 + n) as u64,
None => {
if !this.poll_ready(cx)? {
return Poll::Pending;
}
(this.progress.borrow().expected_size.ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::Other,
eyre!("upload maked complete without expected size"),
)
})? as i64
+ n) as u64
}
}
}
};
if !this.poll_ready_for(cx, size)? {
return Poll::Pending;
}
}
if let Some(seek) = this.to_seek.take() {
this.file.as_mut().start_seek(seek)?;
}
*this.position = ready!(this.file.as_mut().poll_complete(cx)?);
Poll::Ready(Ok(*this.position))
}
}
#[pin_project::pin_project(PinnedDrop)]
pub struct UploadHandle {
tmp_dir: Arc<TmpDir>,
#[pin]
file: File,
progress: watch::Sender<Progress>,

View File

@@ -610,13 +610,13 @@ pub fn dir_copy<'a, P0: AsRef<Path> + 'a + Send + Sync, P1: AsRef<Path> + 'a + S
let src_path = e.path();
let dst_path = dst_path.join(e.file_name());
if m.is_file() {
let mut dst_file = tokio::fs::File::create(&dst_path).await.with_ctx(|_| {
let mut dst_file = create_file(&dst_path).await.with_ctx(|_| {
(
crate::ErrorKind::Filesystem,
format!("create {}", dst_path.display()),
)
})?;
let mut rdr = tokio::fs::File::open(&src_path).await.with_ctx(|_| {
let mut rdr = open_file(&src_path).await.with_ctx(|_| {
(
crate::ErrorKind::Filesystem,
format!("open {}", src_path.display()),
@@ -829,6 +829,13 @@ impl Drop for TmpDir {
}
}
pub async fn open_file(path: impl AsRef<Path>) -> Result<File, Error> {
let path = path.as_ref();
File::open(path)
.await
.with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("open {path:?}")))
}
pub async fn create_file(path: impl AsRef<Path>) -> Result<File, Error> {
let path = path.as_ref();
if let Some(parent) = path.parent() {

View File

@@ -26,6 +26,7 @@ use tokio::sync::{oneshot, Mutex, OwnedMutexGuard, RwLock};
use tracing::instrument;
use crate::shutdown::Shutdown;
use crate::util::io::create_file;
use crate::{Error, ErrorKind, ResultExt as _};
pub mod actor;
pub mod clap;
@@ -385,16 +386,16 @@ impl<T> SOption<T> for SNone<T> {}
#[async_trait]
pub trait AsyncFileExt: Sized {
async fn maybe_open<P: AsRef<Path> + Send + Sync>(path: P) -> std::io::Result<Option<Self>>;
async fn maybe_open<P: AsRef<Path> + Send + Sync>(path: P) -> Result<Option<Self>, Error>;
async fn delete<P: AsRef<Path> + Send + Sync>(path: P) -> std::io::Result<()>;
}
#[async_trait]
impl AsyncFileExt for File {
async fn maybe_open<P: AsRef<Path> + Send + Sync>(path: P) -> std::io::Result<Option<Self>> {
match File::open(path).await {
async fn maybe_open<P: AsRef<Path> + Send + Sync>(path: P) -> Result<Option<Self>, Error> {
match File::open(path.as_ref()).await {
Ok(f) => Ok(Some(f)),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(e) => Err(e),
Err(e) => Err(e).with_ctx(|_| (ErrorKind::Filesystem, path.as_ref().display())),
}
}
async fn delete<P: AsRef<Path> + Send + Sync>(path: P) -> std::io::Result<()> {
@@ -590,9 +591,7 @@ impl FileLock {
.await
.with_ctx(|_| (crate::ErrorKind::Filesystem, parent.display().to_string()))?;
}
let f = File::create(&path)
.await
.with_ctx(|_| (crate::ErrorKind::Filesystem, path.display().to_string()))?;
let f = create_file(&path).await?;
let file_guard = tokio::task::spawn_blocking(move || {
fd_lock_rs::FdLock::lock(f, fd_lock_rs::LockType::Exclusive, blocking)
})

View File

@@ -3,7 +3,6 @@ use std::path::Path;
use clap::Parser;
use rpc_toolkit::{from_fn_async, Context, ParentHandler};
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use url::Url;
use crate::context::CliContext;
@@ -11,7 +10,7 @@ use crate::prelude::*;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::ParallelBlake3Writer;
use crate::util::io::{open_file, ParallelBlake3Writer};
use crate::util::serde::Base16;
use crate::util::Apply;
use crate::CAP_10_MiB;
@@ -40,7 +39,7 @@ pub async fn b3sum(
path: impl AsRef<Path>,
allow_mmap: bool,
) -> Result<Base16<[u8; 32]>, Error> {
let file = MultiCursorFile::from(File::open(path).await?);
let file = MultiCursorFile::from(open_file(path).await?);
if allow_mmap {
return file.blake3_mmap().await.map(|h| *h.as_bytes()).map(Base16);
}

View File

@@ -1,4 +1,3 @@
use std::any::TypeId;
use std::collections::VecDeque;
use std::marker::PhantomData;
use std::ops::Deref;
@@ -9,10 +8,9 @@ use clap::{ArgMatches, CommandFactory, FromArgMatches};
use color_eyre::eyre::eyre;
use imbl::OrdMap;
use openssl::pkey::{PKey, Private};
use openssl::x509::{X509Ref, X509};
use openssl::x509::X509;
use rpc_toolkit::{
CliBindings, Context, Handler, HandlerArgs, HandlerArgsFor, HandlerFor, HandlerTypes,
PrintCliResult,
CliBindings, Context, HandlerArgs, HandlerArgsFor, HandlerFor, HandlerTypes, PrintCliResult,
};
use serde::de::DeserializeOwned;
use serde::ser::{SerializeMap, SerializeSeq};
@@ -1188,7 +1186,7 @@ pub trait PemEncoding: Sized {
impl PemEncoding for X509 {
fn from_pem<E: serde::de::Error>(pem: &str) -> Result<Self, E> {
X509::from_pem(pem.as_bytes()).map_err(E::custom)
Self::from_pem(pem.as_bytes()).map_err(E::custom)
}
fn to_pem<E: serde::ser::Error>(&self) -> Result<String, E> {
String::from_utf8((&**self).to_pem().map_err(E::custom)?).map_err(E::custom)
@@ -1197,7 +1195,7 @@ impl PemEncoding for X509 {
impl PemEncoding for PKey<Private> {
fn from_pem<E: serde::de::Error>(pem: &str) -> Result<Self, E> {
PKey::<Private>::private_key_from_pem(pem.as_bytes()).map_err(E::custom)
Self::private_key_from_pem(pem.as_bytes()).map_err(E::custom)
}
fn to_pem<E: serde::ser::Error>(&self) -> Result<String, E> {
String::from_utf8((&**self).private_key_to_pem_pkcs8().map_err(E::custom)?)
@@ -1207,7 +1205,7 @@ impl PemEncoding for PKey<Private> {
impl PemEncoding for ssh_key::PrivateKey {
fn from_pem<E: serde::de::Error>(pem: &str) -> Result<Self, E> {
ssh_key::PrivateKey::from_openssh(pem.as_bytes()).map_err(E::custom)
Self::from_openssh(pem.as_bytes()).map_err(E::custom)
}
fn to_pem<E: serde::ser::Error>(&self) -> Result<String, E> {
self.to_openssh(ssh_key::LineEnding::LF)
@@ -1219,7 +1217,7 @@ impl PemEncoding for ssh_key::PrivateKey {
impl PemEncoding for ed25519_dalek::VerifyingKey {
fn from_pem<E: serde::de::Error>(pem: &str) -> Result<Self, E> {
use ed25519_dalek::pkcs8::DecodePublicKey;
ed25519_dalek::VerifyingKey::from_public_key_pem(pem).map_err(E::custom)
Self::from_public_key_pem(pem).map_err(E::custom)
}
fn to_pem<E: serde::ser::Error>(&self) -> Result<String, E> {
use ed25519_dalek::pkcs8::EncodePublicKey;
@@ -1228,6 +1226,19 @@ impl PemEncoding for ed25519_dalek::VerifyingKey {
}
}
impl PemEncoding for ed25519_dalek::SigningKey {
fn from_pem<E: serde::de::Error>(pem: &str) -> Result<Self, E> {
use ed25519_dalek::pkcs8::DecodePrivateKey;
Self::from_pkcs8_pem(pem).map_err(E::custom)
}
fn to_pem<E: serde::ser::Error>(&self) -> Result<String, E> {
use ed25519_dalek::pkcs8::EncodePrivateKey;
self.to_pkcs8_pem(pkcs8::LineEnding::LF)
.map_err(E::custom)
.map(|s| s.as_str().to_owned())
}
}
pub mod pem {
use serde::{Deserialize, Deserializer, Serializer};

View File

@@ -25,20 +25,20 @@ enum Version {
V0_3_5_1(Wrapper<v0_3_5_1::Version>),
V0_3_5_2(Wrapper<v0_3_5_2::Version>),
V0_3_6(Wrapper<v0_3_6::Version>),
Other(emver::Version),
Other(exver::Version),
}
impl Version {
fn from_util_version(version: crate::util::VersionString) -> Self {
fn from_exver_version(version: exver::Version) -> Self {
serde_json::to_value(version.clone())
.and_then(serde_json::from_value)
.unwrap_or_else(|_e| {
tracing::warn!("Can't deserialize: {:?} and falling back to other", version);
Version::Other(version.into_version())
Version::Other(version)
})
}
#[cfg(test)]
fn as_sem_ver(&self) -> emver::Version {
fn as_exver(&self) -> exver::Version {
match self {
Version::LT0_3_5(LTWrapper(_, x)) => x.clone(),
Version::V0_3_5(Wrapper(x)) => x.semver(),
@@ -56,8 +56,8 @@ where
{
type Previous: VersionT;
fn new() -> Self;
fn semver(&self) -> emver::Version;
fn compat(&self) -> &'static emver::VersionRange;
fn semver(&self) -> exver::Version;
fn compat(&self) -> &'static exver::VersionRange;
fn up(&self, db: &TypedPatchDb<Database>) -> impl Future<Output = Result<(), Error>> + Send;
fn down(&self, db: &TypedPatchDb<Database>) -> impl Future<Output = Result<(), Error>> + Send;
fn commit(
@@ -158,7 +158,7 @@ where
}
#[derive(Debug, Clone)]
struct LTWrapper<T>(T, emver::Version);
struct LTWrapper<T>(T, exver::Version);
impl<T> serde::Serialize for LTWrapper<T>
where
T: VersionT,
@@ -172,10 +172,10 @@ where
T: VersionT,
{
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let v = crate::util::VersionString::deserialize(deserializer)?;
let v = exver::Version::deserialize(deserializer)?;
let version = T::new();
if *v < version.semver() {
Ok(Self(version, v.into_version()))
if v < version.semver() {
Ok(Self(version, v))
} else {
Err(serde::de::Error::custom("Mismatched Version"))
}
@@ -197,9 +197,9 @@ where
T: VersionT,
{
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let v = crate::util::VersionString::deserialize(deserializer)?;
let v = exver::Version::deserialize(deserializer)?;
let version = T::new();
if *v == version.semver() {
if v == version.semver() {
Ok(Wrapper(version))
} else {
Err(serde::de::Error::custom("Mismatched Version"))
@@ -212,7 +212,7 @@ pub async fn init(
mut progress: PhaseProgressTrackerHandle,
) -> Result<(), Error> {
progress.start();
let version = Version::from_util_version(
let version = Version::from_exver_version(
db.peek()
.await
.as_public()
@@ -256,9 +256,18 @@ mod tests {
use super::*;
fn em_version() -> impl Strategy<Value = emver::Version> {
any::<(usize, usize, usize, usize)>().prop_map(|(major, minor, patch, super_minor)| {
emver::Version::new(major, minor, patch, super_minor)
fn em_version() -> impl Strategy<Value = exver::Version> {
any::<(usize, usize, usize, bool)>().prop_map(|(major, minor, patch, alpha)| {
if alpha {
exver::Version::new(
[0, major, minor]
.into_iter()
.chain(Some(patch).filter(|n| *n != 0)),
[],
)
} else {
exver::Version::new([major, minor, patch], [])
}
})
}
@@ -273,15 +282,15 @@ mod tests {
proptest! {
#[test]
fn emversion_isomorphic_version(original in em_version()) {
let version = Version::from_util_version(original.clone().into());
let back = version.as_sem_ver();
fn exversion_isomorphic_version(original in em_version()) {
let version = Version::from_exver_version(original.clone().into());
let back = version.as_exver();
prop_assert_eq!(original, back, "All versions should round trip");
}
#[test]
fn version_isomorphic_em_version(version in versions()) {
let sem_ver = version.as_sem_ver();
let back = Version::from_util_version(sem_ver.into());
let sem_ver = version.as_exver();
let back = Version::from_exver_version(sem_ver.into());
prop_assert_eq!(format!("{:?}",version), format!("{:?}", back), "All versions should round trip");
}
}

View File

@@ -1,4 +1,4 @@
use emver::VersionRange;
use exver::{ExtendedVersion, VersionRange};
use super::VersionT;
use crate::db::model::Database;
@@ -6,17 +6,25 @@ use crate::prelude::*;
use crate::version::Current;
lazy_static::lazy_static! {
pub static ref V0_3_0_COMPAT: VersionRange = VersionRange::Conj(
Box::new(VersionRange::Anchor(
emver::GTE,
emver::Version::new(0, 3, 0, 0),
)),
Box::new(VersionRange::Anchor(emver::LTE, Current::new().semver())),
pub static ref V0_3_0_COMPAT: VersionRange = VersionRange::and(
VersionRange::anchor(
exver::GTE,
ExtendedVersion::new(
exver::Version::new([0, 3, 0], []),
exver::Version::default(),
),
),
VersionRange::anchor(
exver::LTE,
ExtendedVersion::new(
Current::new().semver(),
exver::Version::default(),
)
),
);
static ref V0_3_5: exver::Version = exver::Version::new([0, 3, 5], []);
}
const V0_3_5: emver::Version = emver::Version::new(0, 3, 5, 0);
#[derive(Clone, Debug)]
pub struct Version;
@@ -25,8 +33,8 @@ impl VersionT for Version {
fn new() -> Self {
Version
}
fn semver(&self) -> emver::Version {
V0_3_5
fn semver(&self) -> exver::Version {
V0_3_5.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT

View File

@@ -1,11 +1,13 @@
use emver::VersionRange;
use exver::VersionRange;
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_5, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
const V0_3_5_1: emver::Version = emver::Version::new(0, 3, 5, 1);
lazy_static::lazy_static! {
static ref V0_3_5_1: exver::Version = exver::Version::new([0, 3, 5, 1], []);
}
#[derive(Clone, Debug)]
pub struct Version;
@@ -15,8 +17,8 @@ impl VersionT for Version {
fn new() -> Self {
Version
}
fn semver(&self) -> emver::Version {
V0_3_5_1
fn semver(&self) -> exver::Version {
V0_3_5_1.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT

View File

@@ -1,11 +1,13 @@
use emver::VersionRange;
use exver::VersionRange;
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_5_1, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
const V0_3_5_2: emver::Version = emver::Version::new(0, 3, 5, 2);
lazy_static::lazy_static! {
static ref V0_3_5_2: exver::Version = exver::Version::new([0, 3, 5, 2], []);
}
#[derive(Clone, Debug)]
pub struct Version;
@@ -15,8 +17,8 @@ impl VersionT for Version {
fn new() -> Self {
Version
}
fn semver(&self) -> emver::Version {
V0_3_5_2
fn semver(&self) -> exver::Version {
V0_3_5_2.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT

View File

@@ -1,11 +1,13 @@
use emver::VersionRange;
use exver::VersionRange;
use super::v0_3_5::V0_3_0_COMPAT;
use super::{v0_3_5_1, VersionT};
use crate::db::model::Database;
use crate::prelude::*;
const V0_3_6: emver::Version = emver::Version::new(0, 3, 6, 0);
lazy_static::lazy_static! {
static ref V0_3_6: exver::Version = exver::Version::new([0, 3, 6], []);
}
#[derive(Clone, Debug)]
pub struct Version;
@@ -15,8 +17,8 @@ impl VersionT for Version {
fn new() -> Self {
Version
}
fn semver(&self) -> emver::Version {
V0_3_6
fn semver(&self) -> exver::Version {
V0_3_6.clone()
}
fn compat(&self) -> &'static VersionRange {
&V0_3_0_COMPAT

1
debian/postinst vendored
View File

@@ -78,6 +78,7 @@ sed -i '/\(^\|#\)Compress=/c\Compress=yes' /etc/systemd/journald.conf
sed -i '/\(^\|#\)SystemMaxUse=/c\SystemMaxUse=1G' /etc/systemd/journald.conf
sed -i '/\(^\|#\)ForwardToSyslog=/c\ForwardToSyslog=no' /etc/systemd/journald.conf
sed -i '/^\s*#\?\s*issue_discards\s*=\s*/c\issue_discards = 1' /etc/lvm/lvm.conf
sed -i '/\(^\|#\)\s*unqualified-search-registries\s*=\s*/c\unqualified-search-registries = ["docker.io"]' /etc/containers/registries.conf
mkdir -p /etc/nginx/ssl

6
package-lock.json generated Normal file
View File

@@ -0,0 +1,6 @@
{
"name": "embassy-os",
"lockfileVersion": 2,
"requires": true,
"packages": {}
}

View File

@@ -2,9 +2,9 @@ import * as matches from "ts-matches"
const starSub = /((\d+\.)*\d+)\.\*/
// prettier-ignore
export type ValidEmVer = `${number}${`.${number}` | ""}${`.${number}` | ""}${`-${string}` | ""}`;
export type ValidEmVer = string;
// prettier-ignore
export type ValidEmVerRange = `${'>=' | '<='| '<' | '>' | ''}${'^' | '~' | ''}${number | '*'}${`.${number | '*'}` | ""}${`.${number | '*'}` | ""}${`-${string}` | ""}`;
export type ValidEmVerRange = string;
function incrementLastNumber(list: number[]) {
const newList = [...list]

View File

@@ -1,6 +1,7 @@
export { EmVer } from "./emverLite/mod"
export { setupManifest } from "./manifest/setupManifest"
export { setupExposeStore } from "./store/setupExposeStore"
export { S9pk } from "./s9pk"
export * as config from "./config"
export * as CB from "./config/builder"
export * as CT from "./config/configTypes"

View File

@@ -6,6 +6,7 @@ export { setupManifest } from "./manifest/setupManifest"
export { FileHelper } from "./util/fileHelper"
export { setupExposeStore } from "./store/setupExposeStore"
export { pathBuilder } from "./store/PathBuilder"
export { S9pk } from "./s9pk"
export * as actions from "./actions"
export * as backup from "./backup"

View File

@@ -1,10 +1,11 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PackageDetailLevel } from "./PackageDetailLevel"
import type { PackageId } from "./PackageId"
import type { Version } from "./Version"
export type GetPackageParams = {
id: PackageId | null
version: string | null
sourceVersion: string | null
sourceVersion: Version | null
otherVersions: PackageDetailLevel | null
}

View File

@@ -28,6 +28,6 @@ export type Manifest = {
dependencies: Dependencies
hardwareRequirements: HardwareRequirements
gitHash: string | null
osVersion: Version
osVersion: string
hasConfig: boolean
}

View File

@@ -4,7 +4,6 @@ import type { Description } from "./Description"
import type { HardwareRequirements } from "./HardwareRequirements"
import type { MerkleArchiveCommitment } from "./MerkleArchiveCommitment"
import type { RegistryAsset } from "./RegistryAsset"
import type { Version } from "./Version"
export type PackageVersionInfo = {
title: string
@@ -17,7 +16,7 @@ export type PackageVersionInfo = {
upstreamRepo: string
supportSite: string
marketingSite: string
osVersion: Version
osVersion: string
hardwareRequirements: HardwareRequirements
sourceVersion: string | null
s9pk: RegistryAsset<MerkleArchiveCommitment>

View File

@@ -2,7 +2,6 @@
import type { Governor } from "./Governor"
import type { IpInfo } from "./IpInfo"
import type { ServerStatus } from "./ServerStatus"
import type { Version } from "./Version"
import type { WifiInfo } from "./WifiInfo"
export type ServerInfo = {
@@ -10,7 +9,7 @@ export type ServerInfo = {
platform: string
id: string
hostname: string
version: Version
version: string
lastBackup: string | null
eosVersionCompat: string
lanAddress: string

67
sdk/lib/s9pk/index.ts Normal file
View File

@@ -0,0 +1,67 @@
import { DataUrl, Manifest, MerkleArchiveCommitment } from "../osBindings"
import { ArrayBufferReader, MerkleArchive } from "./merkleArchive"
import mime from "mime"
const magicAndVersion = new Uint8Array([59, 59, 2])
export function compare(a: Uint8Array, b: Uint8Array) {
if (a.length !== b.length) return false
for (let i = 0; i < a.length; i++) {
if (a[i] !== b[i]) return false
}
return true
}
export class S9pk {
private constructor(
readonly manifest: Manifest,
readonly archive: MerkleArchive,
readonly size: number,
) {}
static async deserialize(
source: Blob,
commitment: MerkleArchiveCommitment | null,
): Promise<S9pk> {
const header = new ArrayBufferReader(
await source
.slice(0, magicAndVersion.length + MerkleArchive.headerSize)
.arrayBuffer(),
)
const magicVersion = new Uint8Array(header.next(magicAndVersion.length))
if (!compare(magicVersion, magicAndVersion)) {
throw new Error("Invalid Magic or Unexpected Version")
}
const archive = await MerkleArchive.deserialize(
source,
"s9pk",
header,
commitment,
)
const manifest = JSON.parse(
new TextDecoder().decode(
await archive.contents
.getPath(["manifest.json"])
?.verifiedFileContents(),
),
)
return new S9pk(manifest, archive, source.length)
}
async icon(): Promise<DataUrl> {
const iconName = Object.keys(this.archive.contents.contents).find(
(name) =>
name.startsWith("icon.") && mime.getType(name)?.startsWith("image/"),
)
if (!iconName) {
throw new Error("no icon found in archive")
}
return (
`data:${mime.getType(iconName)};base64,` +
Buffer.from(
await this.archive.contents.getPath([iconName])!.verifiedFileContents(),
).toString("base64")
)
}
}

View File

@@ -0,0 +1,80 @@
import { ArrayBufferReader, Entry } from "."
import { blake3 } from "@noble/hashes/blake3"
import { serializeVarint } from "./varint"
import { FileContents } from "./fileContents"
import { compare } from ".."
/**
 * A directory node of a MerkleArchive: a map from entry name to Entry.
 *
 * Deserialization reads a table of contents from the source blob and then
 * recomputes the directory's sighash, rejecting the data if it does not match
 * the hash committed to by the parent (or by the archive signature at the
 * root).
 */
export class DirectoryContents {
  // On-disk directory header layout, in order:
  static readonly headerSize =
    8 + // position: u64 BE
    8 // size: u64 BE
  private constructor(readonly contents: { [name: string]: Entry }) {}
  /**
   * Read a directory from `source` using the position/size pair in `header`.
   *
   * @param source the whole archive blob; entry contents are sliced from it.
   * @param header reader positioned at this directory's header.
   * @param sighash expected blake3 sighash of the decoded table of contents.
   * @param maxSize upper bound (from the signed/parent data) on the table of
   *   contents size; larger values are rejected before reading.
   * @throws Error when the size exceeds `maxSize` or the recomputed sighash
   *   does not match `sighash`.
   */
  static async deserialize(
    source: Blob,
    header: ArrayBufferReader,
    sighash: Uint8Array,
    maxSize: bigint,
  ): Promise<DirectoryContents> {
    const position = header.nextU64()
    const size = header.nextU64()
    // Reject before allocating: `size` is attacker-controlled, `maxSize` is
    // covered by the signature chain.
    if (size > maxSize) {
      throw new Error("size is greater than signed")
    }
    const tocReader = new ArrayBufferReader(
      await source
        .slice(Number(position), Number(position + size))
        .arrayBuffer(),
    )
    // Table of contents: varint entry count, then (varstring name, Entry) pairs.
    const len = tocReader.nextVarint()
    const entries: { [name: string]: Entry } = {}
    for (let i = 0; i < len; i++) {
      const name = tocReader.nextVarstring()
      const entry = await Entry.deserialize(source, tocReader)
      entries[name] = entry
    }
    const res = new DirectoryContents(entries)
    // Verify what was actually decoded against the committed hash.
    if (!compare(res.sighash(), sighash)) {
      throw new Error("hash sum does not match")
    }
    return res
  }
  /**
   * Recompute this directory's blake3 sighash.
   *
   * Hashes, in sorted-name order: varint entry count, then per entry the
   * varint name length, name bytes, entry hash, u64 BE entry size, and a
   * trailing 0 byte. The exact byte order must match the serializer that
   * produced the archive — do not reorder these updates.
   */
  sighash(): Uint8Array {
    const hasher = blake3.create({})
    // Names are sorted so the hash is independent of object insertion order.
    const names = Object.keys(this.contents).sort()
    hasher.update(new Uint8Array(serializeVarint(names.length)))
    for (const name of names) {
      const entry = this.contents[name]
      const nameBuf = new TextEncoder().encode(name)
      hasher.update(new Uint8Array(serializeVarint(nameBuf.length)))
      hasher.update(nameBuf)
      hasher.update(new Uint8Array(entry.hash))
      const sizeBuf = new Uint8Array(8)
      new DataView(sizeBuf.buffer).setBigUint64(0, entry.size)
      hasher.update(sizeBuf)
      // NOTE(review): trailing 0 byte per entry — presumably a format
      // terminator mirrored from the serializer; confirm against the Rust
      // implementation before changing.
      hasher.update(new Uint8Array([0]))
    }
    return hasher.digest()
  }
  /**
   * Walk `path` segment by segment; returns the Entry at the end of the path,
   * or null when the path is empty, a segment is missing, or an intermediate
   * segment is not a directory.
   */
  getPath(path: string[]): Entry | null {
    if (path.length === 0) {
      return null
    }
    const next = this.contents[path[0]]
    const rest = path.slice(1)
    if (next === undefined) {
      return null
    }
    if (rest.length === 0) {
      return next
    }
    if (next.contents instanceof DirectoryContents) {
      return next.contents.getPath(rest)
    }
    return null
  }
}

View File

@@ -0,0 +1,24 @@
import { blake3 } from "@noble/hashes/blake3"
import { ArrayBufferReader } from "."
import { compare } from ".."
/**
 * A leaf file in a MerkleArchive: a lazily-sliced Blob plus on-demand blake3
 * verification against the hash stored in the owning Entry.
 */
export class FileContents {
  private constructor(readonly contents: Blob) {}

  /**
   * Read the file's u64 position from `header` and capture a lazy slice of
   * `size` bytes out of `source`. No bytes are loaded until `verified`.
   */
  static deserialize(
    source: Blob,
    header: ArrayBufferReader,
    size: bigint,
  ): FileContents {
    const position = header.nextU64()
    const slice = source.slice(Number(position), Number(position + size))
    return new FileContents(slice)
  }

  /**
   * Resolve the blob to an ArrayBuffer and check its blake3 digest.
   *
   * @throws Error when the digest does not match `hash`.
   */
  async verified(hash: Uint8Array): Promise<ArrayBuffer> {
    const bytes = await this.contents.arrayBuffer()
    if (compare(hash, blake3(new Uint8Array(bytes)))) {
      return bytes
    }
    throw new Error("hash sum mismatch")
  }
}

View File

@@ -0,0 +1,167 @@
import { MerkleArchiveCommitment } from "../../osBindings"
import { DirectoryContents } from "./directoryContents"
import { FileContents } from "./fileContents"
import { ed25519ph } from "@noble/curves/ed25519"
import { sha512 } from "@noble/hashes/sha2"
import { VarIntProcessor } from "./varint"
import { compare } from ".."
const maxVarstringLen = 1024 * 1024
export type Signer = {
pubkey: Uint8Array
signature: Uint8Array
maxSize: bigint
context: string
}
/**
 * Sequential reader over an ArrayBuffer: each `next*` call consumes bytes
 * from the front of the remaining buffer.
 */
export class ArrayBufferReader {
  constructor(private buffer: ArrayBuffer) {}
  /** Consume and return the next `length` bytes. */
  next(length: number): ArrayBuffer {
    const res = this.buffer.slice(0, length)
    this.buffer = this.buffer.slice(length)
    return res
  }
  /** Consume 8 bytes as a big-endian unsigned 64-bit integer. */
  nextU64(): bigint {
    return new DataView(this.next(8)).getBigUint64(0)
  }
  /**
   * Consume a varint.
   *
   * @throws Error("Reached EOF") when the buffer ends mid-varint or the
   *   accumulated bytes do not decode.
   */
  nextVarint(): number {
    const p = new VarIntProcessor()
    while (!p.finished()) {
      // BUG FIX: without this guard, an exhausted buffer made
      // `slice(0, 1)[0]` yield undefined, which the processor coerced to 0 —
      // terminating the loop and returning a silently truncated value.
      if (this.buffer.byteLength === 0) {
        throw new Error("Reached EOF")
      }
      p.push(new Uint8Array(this.buffer.slice(0, 1))[0])
      this.buffer = this.buffer.slice(1)
    }
    const res = p.decode()
    if (res === null) {
      throw new Error("Reached EOF")
    }
    return res
  }
  /** Consume a varint length followed by that many UTF-8 bytes. */
  nextVarstring(): string {
    // NOTE(review): lengths above maxVarstringLen are silently clamped, which
    // would desynchronize the reader from the stream for oversized strings —
    // confirm whether an explicit error would better match the serializer.
    const len = Math.min(this.nextVarint(), maxVarstringLen)
    return new TextDecoder().decode(this.next(len))
  }
}
/**
 * Signed merkle archive: an ed25519ph signature over the root directory's
 * sighash and max size, plus the verified root DirectoryContents.
 */
export class MerkleArchive {
  // Fixed archive header layout, in order:
  static readonly headerSize =
    32 + // pubkey
    64 + // signature
    32 + // sighash
    8 + // size
    DirectoryContents.headerSize
  private constructor(
    readonly signer: Signer,
    readonly contents: DirectoryContents,
  ) {}
  /**
   * Deserialize and fully verify an archive.
   *
   * Verification order: (1) ed25519ph signature over sighash || maxSize with
   * the domain-separation `context`; (2) when a `commitment` is supplied, the
   * sighash and maxSize must match it; otherwise maxSize is capped at 1MiB as
   * a DOS guard; (3) the root directory is decoded and its recomputed sighash
   * checked inside DirectoryContents.deserialize.
   *
   * @param source the whole archive blob.
   * @param context domain-separation string for the signature (e.g. "s9pk").
   * @param header reader positioned at the start of the archive header.
   * @param commitment expected root sighash/max-size, or null to accept any
   *   signer subject to the 1MiB root cap.
   * @throws Error on signature failure, commitment mismatch, or oversize root.
   */
  static async deserialize(
    source: Blob,
    context: string,
    header: ArrayBufferReader,
    commitment: MerkleArchiveCommitment | null,
  ): Promise<MerkleArchive> {
    const pubkey = new Uint8Array(header.next(32))
    const signature = new Uint8Array(header.next(64))
    const sighash = new Uint8Array(header.next(32))
    // Keep the raw bytes: the signature covers sighash || rootMaxSizeBytes.
    const rootMaxSizeBytes = header.next(8)
    const maxSize = new DataView(rootMaxSizeBytes).getBigUint64(0)
    if (
      !ed25519ph.verify(
        signature,
        new Uint8Array(
          await new Blob([sighash, rootMaxSizeBytes]).arrayBuffer(),
        ),
        pubkey,
        {
          // zip215: permissive ed25519 validation rules for interoperability.
          context: new TextEncoder().encode(context),
          zip215: true,
        },
      )
    ) {
      throw new Error("signature verification failed")
    }
    if (commitment) {
      // Caller pinned an expected root: both the hash and the size bound must
      // match what was committed to out-of-band.
      if (
        !compare(
          sighash,
          new Uint8Array(Buffer.from(commitment.rootSighash, "base64").buffer),
        )
      ) {
        throw new Error("merkle root mismatch")
      }
      if (maxSize > commitment.rootMaxsize) {
        throw new Error("root directory max size too large")
      }
    } else if (maxSize > 1024 * 1024) {
      // No commitment: bound the root table of contents so a hostile (but
      // validly signed) archive cannot force a huge allocation.
      throw new Error(
        "root directory max size over 1MiB, cancelling download in case of DOS attack",
      )
    }
    const contents = await DirectoryContents.deserialize(
      source,
      header,
      sighash,
      maxSize,
    )
    return new MerkleArchive(
      {
        pubkey,
        signature,
        maxSize,
        context,
      },
      contents,
    )
  }
}
/**
 * One named node in a directory: its committed hash, declared size, and typed
 * contents (missing, file, or subdirectory).
 */
export class Entry {
  private constructor(
    readonly hash: Uint8Array,
    readonly size: bigint,
    readonly contents: EntryContents,
  ) {}

  /** Read one entry: 32-byte hash, u64 size, then tag-dispatched contents. */
  static async deserialize(
    source: Blob,
    header: ArrayBufferReader,
  ): Promise<Entry> {
    const entryHash = new Uint8Array(header.next(32))
    const entrySize = header.nextU64()
    return new Entry(
      new Uint8Array(entryHash),
      entrySize,
      await deserializeEntryContents(source, header, entryHash, entrySize),
    )
  }

  /**
   * Contents as a hash-verified ArrayBuffer.
   *
   * @throws Error when the entry's contents are absent or not a regular file.
   */
  async verifiedFileContents(): Promise<ArrayBuffer> {
    const inner = this.contents
    if (!inner) {
      throw new Error("file is missing from archive")
    }
    if (inner instanceof FileContents) {
      return inner.verified(this.hash)
    }
    throw new Error("is not a regular file")
  }
}
/** An entry's payload: absent, a regular file, or a subdirectory. */
export type EntryContents = null | FileContents | DirectoryContents

/** Dispatch on the 1-byte type tag: 0 = missing, 1 = file, 2 = directory. */
async function deserializeEntryContents(
  source: Blob,
  header: ArrayBufferReader,
  hash: Uint8Array,
  size: bigint,
): Promise<EntryContents> {
  const typeId = new Uint8Array(header.next(1))[0]
  if (typeId === 0) {
    return null
  }
  if (typeId === 1) {
    return FileContents.deserialize(source, header, size)
  }
  if (typeId === 2) {
    return DirectoryContents.deserialize(source, header, hash, size)
  }
  throw new Error(`Unknown type id ${typeId} found in MerkleArchive`)
}

View File

@@ -0,0 +1,62 @@
// Continuation bit and payload mask for LEB128-style unsigned varints.
const msb = 0x80
const dropMsb = 0x7f
// Max encoded length of a u64 varint: ceil(64 / 7) = 10 bytes.
const maxSize = Math.floor((8 * 8 + 7) / 7)

/**
 * Incremental decoder for unsigned varints: push bytes one at a time until
 * `finished()`, then `decode()`.
 */
export class VarIntProcessor {
  private buf: Uint8Array
  private i: number
  constructor() {
    this.buf = new Uint8Array(maxSize)
    this.i = 0
  }
  /**
   * Feed one encoded byte.
   * @throws Error when more than the maximum 10 bytes are pushed.
   */
  push(b: number) {
    if (this.i >= maxSize) {
      throw new Error("Unterminated varint")
    }
    this.buf[this.i] = b
    this.i += 1
  }
  /** True once a terminating byte (continuation bit clear) has been pushed. */
  finished(): boolean {
    return this.i > 0 && (this.buf[this.i - 1] & msb) === 0
  }
  /**
   * Decode the accumulated bytes, or return null when they do not form a
   * complete/valid varint (the buffer is logged for diagnosis).
   */
  decode(): number | null {
    let result = 0
    let shift = 0
    let success = false
    for (let i = 0; i < this.i; i++) {
      const b = this.buf[i]
      const msbDropped = b & dropMsb
      // BUG FIX: accumulate arithmetically instead of `result |= x << shift`.
      // JS bitwise ops truncate to 32 bits, silently corrupting any value
      // >= 2^31 even though the format carries u64 lengths. Arithmetic is
      // exact up to Number.MAX_SAFE_INTEGER (2^53 - 1).
      result += msbDropped * 2 ** shift
      shift += 7
      if ((b & msb) === 0 || shift > 9 * 7) {
        success = (b & msb) === 0
        break
      }
    }
    if (success) {
      return result
    } else {
      console.error(this.buf)
      return null
    }
  }
}
/**
 * Encode a non-negative integer as an unsigned varint (7 payload bits per
 * byte, little-endian groups, continuation bit on all but the last byte).
 * Exact for any integer up to Number.MAX_SAFE_INTEGER.
 */
export function serializeVarint(int: number): ArrayBuffer {
  const buf = new Uint8Array(maxSize)
  let n = int
  let i = 0
  while (n >= msb) {
    // Low 7 bits with the continuation bit set.
    buf[i] = msb | n % 128
    i += 1
    // BUG FIX: `n >>= 7` is a signed 32-bit shift and breaks for n >= 2^31;
    // integer division stays exact through 2^53 - 1.
    n = Math.floor(n / 128)
  }
  buf[i] = n
  i += 1
  return buf.slice(0, i).buffer
}

View File

@@ -8,18 +8,14 @@ describe("EmVer", () => {
checker.check("1.2")
checker.check("1.2.3")
checker.check("1.2.3.4")
// @ts-expect-error
checker.check("1.2.3.4.5")
// @ts-expect-error
checker.check("1.2.3.4.5.6")
expect(checker.check("1")).toEqual(true)
expect(checker.check("1.2")).toEqual(true)
expect(checker.check("1.2.3.4")).toEqual(true)
})
test("rangeOf('*') invalid", () => {
// @ts-expect-error
expect(() => checker.check("a")).toThrow()
// @ts-expect-error
expect(() => checker.check("")).toThrow()
expect(() => checker.check("1..3")).toThrow()
})
@@ -31,7 +27,6 @@ describe("EmVer", () => {
expect(checker.check("2-beta123")).toEqual(true)
expect(checker.check("2")).toEqual(true)
expect(checker.check("1.2.3.5")).toEqual(true)
// @ts-expect-error
expect(checker.check("1.2.3.4.1")).toEqual(true)
})
@@ -58,7 +53,6 @@ describe("EmVer", () => {
test(`rangeOf(">=1.2.3.4") valid`, () => {
expect(checker.check("2")).toEqual(true)
expect(checker.check("1.2.3.5")).toEqual(true)
// @ts-expect-error
expect(checker.check("1.2.3.4.1")).toEqual(true)
expect(checker.check("1.2.3.4")).toEqual(true)
})
@@ -73,7 +67,6 @@ describe("EmVer", () => {
test(`rangeOf("<1.2.3.4") invalid`, () => {
expect(checker.check("2")).toEqual(false)
expect(checker.check("1.2.3.5")).toEqual(false)
// @ts-expect-error
expect(checker.check("1.2.3.4.1")).toEqual(false)
expect(checker.check("1.2.3.4")).toEqual(false)
})
@@ -88,7 +81,6 @@ describe("EmVer", () => {
test(`rangeOf("<=1.2.3.4") invalid`, () => {
expect(checker.check("2")).toEqual(false)
expect(checker.check("1.2.3.5")).toEqual(false)
// @ts-expect-error
expect(checker.check("1.2.3.4.1")).toEqual(false)
})
@@ -196,7 +188,6 @@ describe("EmVer", () => {
test(`rangeOf("!>1.2.3.4") invalid`, () => {
expect(checker.check("2")).toEqual(false)
expect(checker.check("1.2.3.5")).toEqual(false)
// @ts-expect-error
expect(checker.check("1.2.3.4.1")).toEqual(false)
})

View File

@@ -1,7 +1,7 @@
import * as matches from "ts-matches"
import * as YAML from "yaml"
import * as TOML from "@iarna/toml"
import _ from "lodash"
import merge from "lodash.merge"
import * as T from "../types"
import * as fs from "node:fs/promises"
@@ -82,7 +82,7 @@ export class FileHelper<A> {
async merge(data: A, effects: T.Effects) {
const fileData = (await this.read(effects).catch(() => ({}))) || {}
const mergeData = _.merge({}, fileData, data)
const mergeData = merge({}, fileData, data)
return await this.write(mergeData, effects)
}
/**

84
sdk/package-lock.json generated
View File

@@ -1,29 +1,32 @@
{
"name": "@start9labs/start-sdk",
"version": "0.3.6-alpha1",
"version": "0.3.6-alpha5",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@start9labs/start-sdk",
"version": "0.3.6-alpha1",
"version": "0.3.6-alpha5",
"license": "MIT",
"dependencies": {
"@iarna/toml": "^2.2.5",
"@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0",
"isomorphic-fetch": "^3.0.0",
"lodash": "^4.17.21",
"ts-matches": "^5.4.1"
"lodash.merge": "^4.6.2",
"mime": "^4.0.3",
"ts-matches": "^5.5.1",
"yaml": "^2.2.2"
},
"devDependencies": {
"@iarna/toml": "^2.2.5",
"@types/jest": "^29.4.0",
"@types/lodash": "^4.17.5",
"@types/lodash.merge": "^4.6.2",
"jest": "^29.4.3",
"prettier": "^3.2.5",
"ts-jest": "^29.0.5",
"ts-node": "^10.9.1",
"tsx": "^4.7.1",
"typescript": "^5.0.4",
"yaml": "^2.2.2"
"typescript": "^5.0.4"
}
},
"node_modules/@ampproject/remapping": {
@@ -653,8 +656,7 @@
"node_modules/@iarna/toml": {
"version": "2.2.5",
"resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz",
"integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==",
"dev": true
"integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg=="
},
"node_modules/@istanbuljs/load-nyc-config": {
"version": "1.1.0",
@@ -1006,6 +1008,28 @@
"@jridgewell/sourcemap-codec": "1.4.14"
}
},
"node_modules/@noble/curves": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.4.0.tgz",
"integrity": "sha512-p+4cb332SFCrReJkCYe8Xzm0OWi4Jji5jVdIZRL/PmacmDkFNw6MrrV+gGpiPxLHbV+zKFRywUWbaseT+tZRXg==",
"dependencies": {
"@noble/hashes": "1.4.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@noble/hashes": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz",
"integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@sinclair/typebox": {
"version": "0.25.24",
"resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.25.24.tgz",
@@ -1144,6 +1168,15 @@
"integrity": "sha512-MBIOHVZqVqgfro1euRDWX7OO0fBVUUMrN6Pwm8LQsz8cWhEpihlvR70ENj3f40j58TNxZaWv2ndSkInykNBBJw==",
"dev": true
},
"node_modules/@types/lodash.merge": {
"version": "4.6.9",
"resolved": "https://registry.npmjs.org/@types/lodash.merge/-/lodash.merge-4.6.9.tgz",
"integrity": "sha512-23sHDPmzd59kUgWyKGiOMO2Qb9YtqRO/x4IhkgNUiPQ1+5MUVqi6bCZeq9nBJ17msjIMbEIO5u+XW4Kz6aGUhQ==",
"dev": true,
"dependencies": {
"@types/lodash": "*"
}
},
"node_modules/@types/node": {
"version": "18.15.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.10.tgz",
@@ -2849,17 +2882,17 @@
"node": ">=8"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/lodash.memoize": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
"integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==",
"dev": true
},
"node_modules/lodash.merge": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="
},
"node_modules/lru-cache": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
@@ -2918,6 +2951,20 @@
"node": ">=8.6"
}
},
"node_modules/mime": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/mime/-/mime-4.0.3.tgz",
"integrity": "sha512-KgUb15Oorc0NEKPbvfa0wRU+PItIEZmiv+pyAO2i0oTIVTJhlzMclU7w4RXWQrSOVH5ax/p/CkIO7KI4OyFJTQ==",
"funding": [
"https://github.com/sponsors/broofa"
],
"bin": {
"mime": "bin/cli.js"
},
"engines": {
"node": ">=16"
}
},
"node_modules/mimic-fn": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
@@ -3580,9 +3627,9 @@
"dev": true
},
"node_modules/ts-matches": {
"version": "5.4.1",
"resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-5.4.1.tgz",
"integrity": "sha512-kXrY75F0s0WD15N2bWKDScKlKgwnusN6dTRzGs1N7LlxQRnazrsBISC1HL4sy2adsyk65Zbx3Ui3IGN8leAFOQ=="
"version": "5.5.1",
"resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-5.5.1.tgz",
"integrity": "sha512-UFYaKgfqlg9FROK7bdpYqFwG1CJvP4kOJdjXuWoqxo9jCmANoDw1GxkSCpJgoTeIiSTaTH5Qr1klSspb8c+ydg=="
},
"node_modules/ts-node": {
"version": "10.9.1",
@@ -4249,7 +4296,6 @@
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.2.tgz",
"integrity": "sha512-CBKFWExMn46Foo4cldiChEzn7S7SRV+wqiluAb6xmueD/fGyRHIhX8m14vVGgeFWjN540nKCNVj6P21eQjgTuA==",
"dev": true,
"engines": {
"node": ">= 14"
}

View File

@@ -31,10 +31,13 @@
"homepage": "https://github.com/Start9Labs/start-sdk#readme",
"dependencies": {
"isomorphic-fetch": "^3.0.0",
"lodash": "^4.17.21",
"ts-matches": "^5.4.1",
"lodash.merge": "^4.6.2",
"mime": "^4.0.3",
"ts-matches": "^5.5.1",
"yaml": "^2.2.2",
"@iarna/toml": "^2.2.5"
"@iarna/toml": "^2.2.5",
"@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0"
},
"prettier": {
"trailingComma": "all",
@@ -44,7 +47,7 @@
},
"devDependencies": {
"@types/jest": "^29.4.0",
"@types/lodash": "^4.17.5",
"@types/lodash.merge": "^4.6.2",
"jest": "^29.4.3",
"prettier": "^3.2.5",
"ts-jest": "^29.0.5",

View File

@@ -55,7 +55,7 @@ pub fn validate_configuration(
Ok(_) => {
// create temp config file
serde_yaml::to_writer(
std::fs::File::create(config_path.with_extension("tmp"))?,
std::fs::create_file(config_path.with_extension("tmp"))?,
&config,
)?;
std::fs::rename(config_path.with_extension("tmp"), config_path)?;

80
web/package-lock.json generated
View File

@@ -23,6 +23,8 @@
"@ng-web-apis/common": "^2.0.0",
"@ng-web-apis/mutation-observer": "^2.0.0",
"@ng-web-apis/resize-observer": "^2.0.0",
"@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0",
"@start9labs/argon2": "^0.2.2",
"@start9labs/emver": "^0.1.5",
"@start9labs/start-sdk": "file:../sdk/dist",
@@ -35,6 +37,7 @@
"angular-svg-round-progressbar": "^9.0.0",
"ansi-to-html": "^0.7.2",
"base64-js": "^1.5.1",
"buffer": "^6.0.3",
"cbor": "npm:@jprochazk/cbor@^0.4.9",
"cbor-web": "^8.1.0",
"core-js": "^3.21.1",
@@ -45,6 +48,7 @@
"jose": "^4.9.0",
"js-yaml": "^4.1.0",
"marked": "^4.0.0",
"mime": "^4.0.3",
"monaco-editor": "^0.33.0",
"mustache": "^4.2.0",
"ng-qrcode": "^7.0.0",
@@ -53,7 +57,7 @@
"pbkdf2": "^3.1.2",
"rxjs": "^7.8.1",
"swiper": "^8.2.4",
"ts-matches": "^5.2.1",
"ts-matches": "^5.5.1",
"tslib": "^2.3.0",
"uuid": "^8.3.2",
"zone.js": "^0.11.5"
@@ -1978,14 +1982,17 @@
"license": "MIT",
"dependencies": {
"@iarna/toml": "^2.2.5",
"@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0",
"isomorphic-fetch": "^3.0.0",
"lodash": "^4.17.21",
"ts-matches": "^5.4.1",
"lodash.merge": "^4.6.2",
"mime": "^4.0.3",
"ts-matches": "^5.5.1",
"yaml": "^2.2.2"
},
"devDependencies": {
"@types/jest": "^29.4.0",
"@types/lodash": "^4.17.5",
"@types/lodash.merge": "^4.6.2",
"jest": "^29.4.3",
"prettier": "^3.2.5",
"ts-jest": "^29.0.5",
@@ -5111,6 +5118,28 @@
"@ng-web-apis/common": ">=2.0.0"
}
},
"node_modules/@noble/curves": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.4.0.tgz",
"integrity": "sha512-p+4cb332SFCrReJkCYe8Xzm0OWi4Jji5jVdIZRL/PmacmDkFNw6MrrV+gGpiPxLHbV+zKFRywUWbaseT+tZRXg==",
"dependencies": {
"@noble/hashes": "1.4.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@noble/hashes": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz",
"integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==",
"engines": {
"node": ">= 16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
"devOptional": true,
@@ -9954,6 +9983,19 @@
"node": ">=6"
}
},
"node_modules/less/node_modules/mime": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
"dev": true,
"optional": true,
"bin": {
"mime": "cli.js"
},
"engines": {
"node": ">=4"
}
},
"node_modules/less/node_modules/pify": {
"version": "4.0.1",
"dev": true,
@@ -10551,14 +10593,17 @@
}
},
"node_modules/mime": {
"version": "1.6.0",
"dev": true,
"license": "MIT",
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/mime/-/mime-4.0.3.tgz",
"integrity": "sha512-KgUb15Oorc0NEKPbvfa0wRU+PItIEZmiv+pyAO2i0oTIVTJhlzMclU7w4RXWQrSOVH5ax/p/CkIO7KI4OyFJTQ==",
"funding": [
"https://github.com/sponsors/broofa"
],
"bin": {
"mime": "cli.js"
"mime": "bin/cli.js"
},
"engines": {
"node": ">=4"
"node": ">=16"
}
},
"node_modules/mime-db": {
@@ -13644,6 +13689,18 @@
"node": ">= 0.8"
}
},
"node_modules/send/node_modules/mime": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
"dev": true,
"bin": {
"mime": "cli.js"
},
"engines": {
"node": ">=4"
}
},
"node_modules/send/node_modules/ms": {
"version": "2.1.3",
"dev": true,
@@ -14617,8 +14674,9 @@
}
},
"node_modules/ts-matches": {
"version": "v5.2.1",
"license": "MIT"
"version": "5.5.1",
"resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-5.5.1.tgz",
"integrity": "sha512-UFYaKgfqlg9FROK7bdpYqFwG1CJvP4kOJdjXuWoqxo9jCmANoDw1GxkSCpJgoTeIiSTaTH5Qr1klSspb8c+ydg=="
},
"node_modules/ts-morph": {
"version": "10.0.2",

View File

@@ -46,6 +46,8 @@
"@ng-web-apis/common": "^2.0.0",
"@ng-web-apis/mutation-observer": "^2.0.0",
"@ng-web-apis/resize-observer": "^2.0.0",
"@noble/curves": "^1.4.0",
"@noble/hashes": "^1.4.0",
"@start9labs/argon2": "^0.2.2",
"@start9labs/emver": "^0.1.5",
"@start9labs/start-sdk": "file:../sdk/dist",
@@ -58,6 +60,7 @@
"angular-svg-round-progressbar": "^9.0.0",
"ansi-to-html": "^0.7.2",
"base64-js": "^1.5.1",
"buffer": "^6.0.3",
"cbor": "npm:@jprochazk/cbor@^0.4.9",
"cbor-web": "^8.1.0",
"core-js": "^3.21.1",
@@ -68,6 +71,7 @@
"jose": "^4.9.0",
"js-yaml": "^4.1.0",
"marked": "^4.0.0",
"mime": "^4.0.3",
"monaco-editor": "^0.33.0",
"mustache": "^4.2.0",
"ng-qrcode": "^7.0.0",
@@ -76,7 +80,7 @@
"pbkdf2": "^3.1.2",
"rxjs": "^7.8.1",
"swiper": "^8.2.4",
"ts-matches": "^5.2.1",
"ts-matches": "^5.5.1",
"tslib": "^2.3.0",
"uuid": "^8.3.2",
"zone.js": "^0.11.5"

View File

@@ -4,14 +4,15 @@ import { ApiService } from 'src/app/services/api/embassy-api.service'
import { ConfigService } from 'src/app/services/config.service'
import cbor from 'cbor'
import { ErrorToastService } from '@start9labs/shared'
import { T } from '@start9labs/start-sdk'
import { S9pk, T } from '@start9labs/start-sdk'
interface Positions {
[key: string]: [bigint, bigint] // [position, length]
}
const MAGIC = new Uint8Array([59, 59])
const VERSION = new Uint8Array([1])
const VERSION_1 = new Uint8Array([1])
const VERSION_2 = new Uint8Array([2])
@Component({
selector: 'sideload',
@@ -64,11 +65,36 @@ export class SideloadPage {
async validateS9pk(file: File) {
const magic = new Uint8Array(await blobToBuffer(file.slice(0, 2)))
const version = new Uint8Array(await blobToBuffer(file.slice(2, 3)))
if (compare(magic, MAGIC) && compare(version, VERSION)) {
await this.parseS9pk(file)
return {
invalid: false,
message: 'A valid package file has been detected!',
if (compare(magic, MAGIC)) {
try {
if (compare(version, VERSION_1)) {
await this.parseS9pkV1(file)
return {
invalid: false,
message: 'A valid package file has been detected!',
}
} else if (compare(version, VERSION_2)) {
await this.parseS9pkV2(file)
return {
invalid: false,
message: 'A valid package file has been detected!',
}
} else {
console.error(version)
return {
invalid: true,
message: 'Invalid package file',
}
}
} catch (e) {
console.error(e)
return {
invalid: true,
message:
e instanceof Error
? `Invalid package file: ${e.message}`
: 'Invalid package file',
}
}
} else {
return {
@@ -91,12 +117,9 @@ export class SideloadPage {
})
await loader.present()
try {
const guid = await this.api.sideloadPackage({
manifest: this.toUpload.manifest!,
icon: this.toUpload.icon!,
})
const res = await this.api.sideloadPackage()
this.api
.uploadPackage(guid, this.toUpload.file!)
.uploadPackage(res.upload, this.toUpload.file!)
.catch(e => console.error(e))
this.navCtrl.navigateRoot('/services')
@@ -108,7 +131,7 @@ export class SideloadPage {
}
}
async parseS9pk(file: File) {
async parseS9pkV1(file: File) {
const positions: Positions = {}
// magic=2bytes, version=1bytes, pubkey=32bytes, signature=64bytes, toc_length=4bytes = 103byte is starting point
let start = 103
@@ -122,6 +145,12 @@ export class SideloadPage {
await this.getIcon(positions, file)
}
async parseS9pkV2(file: File) {
const s9pk = await S9pk.deserialize(file, null)
this.toUpload.manifest = s9pk.manifest
this.toUpload.icon = await s9pk.icon()
}
async getManifest(positions: Positions, file: Blob) {
const data = await blobToBuffer(
file.slice(
@@ -225,6 +254,7 @@ async function readBlobToArrayBuffer(
}
function compare(a: Uint8Array, b: Uint8Array) {
if (a.length !== b.length) return false
for (let i = 0; i < a.length; i++) {
if (a[i] !== b[i]) return false
}

View File

@@ -273,7 +273,10 @@ export module RR {
manifest: T.Manifest
icon: string // base64
}
export type SideloadPacakgeRes = string //guid
export type SideloadPackageRes = {
upload: string // guid
progress: string // guid
}
// marketplace

View File

@@ -243,7 +243,5 @@ export abstract class ApiService {
params: RR.DryConfigureDependencyReq,
): Promise<RR.DryConfigureDependencyRes>
abstract sideloadPackage(
params: RR.SideloadPackageReq,
): Promise<RR.SideloadPacakgeRes>
abstract sideloadPackage(): Promise<RR.SideloadPackageRes>
}

View File

@@ -29,7 +29,7 @@ export class LiveApiService extends ApiService {
@Inject(PATCH_CACHE) private readonly cache$: Observable<Dump<DataModel>>,
) {
super()
; (window as any).rpcClient = this
;(window as any).rpcClient = this
}
// for getting static files: ex icons, instructions, licenses
@@ -460,12 +460,10 @@ export class LiveApiService extends ApiService {
})
}
async sideloadPackage(
params: RR.SideloadPackageReq,
): Promise<RR.SideloadPacakgeRes> {
async sideloadPackage(): Promise<RR.SideloadPackageRes> {
return this.rpcRequest({
method: 'package.sideload',
params,
params: {},
})
}

View File

@@ -1062,11 +1062,12 @@ export class MockApiService extends ApiService {
}
}
async sideloadPackage(
params: RR.SideloadPackageReq,
): Promise<RR.SideloadPacakgeRes> {
async sideloadPackage(): Promise<RR.SideloadPackageRes> {
await pauseFor(2000)
return '4120e092-05ab-4de2-9fbd-c3f1f4b1df9e' // no significance, randomly generated
return {
upload: '4120e092-05ab-4de2-9fbd-c3f1f4b1df9e', // no significance, randomly generated
progress: '5120e092-05ab-4de2-9fbd-c3f1f4b1df9e', // no significance, randomly generated
}
}
private async initProgress(): Promise<T.FullProgress> {

Some files were not shown because too many files have changed in this diff Show More