mirror of
https://github.com/Start9Labs/start-os.git
synced 2026-04-04 22:39:46 +00:00
Refactor/actions (#2733)
* store, properties, manifest * interfaces * init and backups * fix init and backups * file models * more versions * dependencies * config except dynamic types * clean up config * remove disabled from non-dynamic vaues * actions * standardize example code block formats * wip: actions refactor Co-authored-by: Jade <Blu-J@users.noreply.github.com> * commit types * fix types * update types * update action request type * update apis * add description to actionrequest * clean up imports * revert package json * chore: Remove the recursive to the index * chore: Remove the other thing I was testing * flatten action requests * update container runtime with new config paradigm * new actions strategy * seems to be working * misc backend fixes * fix fe bugs * only show breakages if breakages * only show success modal if result * don't panic on failed removal * hide config from actions page * polyfill autoconfig * use metadata strategy for actions instead of prev * misc fixes * chore: split the sdk into 2 libs (#2736) * follow sideload progress (#2718) * follow sideload progress * small bugfix * shareReplay with no refcount false * don't wrap sideload progress in RPCResult * dont present toast --------- Co-authored-by: Aiden McClelland <me@drbonez.dev> * chore: Add the initial of the creation of the two sdk * chore: Add in the baseDist * chore: Add in the baseDist * chore: Get the web and the runtime-container running * chore: Remove the empty file * chore: Fix it so the container-runtime works --------- Co-authored-by: Matt Hill <MattDHill@users.noreply.github.com> Co-authored-by: Aiden McClelland <me@drbonez.dev> * misc fixes * update todos * minor clean up * fix link script * update node version in CI test * fix node version syntax in ci build * wip: fixing callbacks * fix sdk makefile dependencies * add support for const outside of main * update apis * don't panic! 
* Chore: Capture weird case on rpc, and log that * fix procedure id issue * pass input value for dep auto config * handle disabled and warning for actions * chore: Fix for link not having node_modules * sdk fixes * fix build * fix build * fix build --------- Co-authored-by: Matt Hill <mattnine@protonmail.com> Co-authored-by: Jade <Blu-J@users.noreply.github.com> Co-authored-by: J H <dragondef@gmail.com> Co-authored-by: Jade <2364004+Blu-J@users.noreply.github.com> Co-authored-by: Matt Hill <MattDHill@users.noreply.github.com>
This commit is contained in:
67
sdk/base/lib/s9pk/index.ts
Normal file
67
sdk/base/lib/s9pk/index.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { DataUrl, Manifest, MerkleArchiveCommitment } from "../osBindings"
|
||||
import { ArrayBufferReader, MerkleArchive } from "./merkleArchive"
|
||||
import mime from "mime"
|
||||
|
||||
// Leading file magic — two 0x3b (";") bytes — followed by s9pk format version 2.
const magicAndVersion = new Uint8Array([59, 59, 2])
|
||||
|
||||
export function compare(a: Uint8Array, b: Uint8Array) {
|
||||
if (a.length !== b.length) return false
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
if (a[i] !== b[i]) return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
export class S9pk {
|
||||
private constructor(
|
||||
readonly manifest: Manifest,
|
||||
readonly archive: MerkleArchive,
|
||||
readonly size: number,
|
||||
) {}
|
||||
static async deserialize(
|
||||
source: Blob,
|
||||
commitment: MerkleArchiveCommitment | null,
|
||||
): Promise<S9pk> {
|
||||
const header = new ArrayBufferReader(
|
||||
await source
|
||||
.slice(0, magicAndVersion.length + MerkleArchive.headerSize)
|
||||
.arrayBuffer(),
|
||||
)
|
||||
const magicVersion = new Uint8Array(header.next(magicAndVersion.length))
|
||||
if (!compare(magicVersion, magicAndVersion)) {
|
||||
throw new Error("Invalid Magic or Unexpected Version")
|
||||
}
|
||||
|
||||
const archive = await MerkleArchive.deserialize(
|
||||
source,
|
||||
"s9pk",
|
||||
header,
|
||||
commitment,
|
||||
)
|
||||
|
||||
const manifest = JSON.parse(
|
||||
new TextDecoder().decode(
|
||||
await archive.contents
|
||||
.getPath(["manifest.json"])
|
||||
?.verifiedFileContents(),
|
||||
),
|
||||
)
|
||||
|
||||
return new S9pk(manifest, archive, source.length)
|
||||
}
|
||||
async icon(): Promise<DataUrl> {
|
||||
const iconName = Object.keys(this.archive.contents.contents).find(
|
||||
(name) =>
|
||||
name.startsWith("icon.") && mime.getType(name)?.startsWith("image/"),
|
||||
)
|
||||
if (!iconName) {
|
||||
throw new Error("no icon found in archive")
|
||||
}
|
||||
return (
|
||||
`data:${mime.getType(iconName)};base64,` +
|
||||
Buffer.from(
|
||||
await this.archive.contents.getPath([iconName])!.verifiedFileContents(),
|
||||
).toString("base64")
|
||||
)
|
||||
}
|
||||
}
|
||||
80
sdk/base/lib/s9pk/merkleArchive/directoryContents.ts
Normal file
80
sdk/base/lib/s9pk/merkleArchive/directoryContents.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { ArrayBufferReader, Entry } from "."
|
||||
import { blake3 } from "@noble/hashes/blake3"
|
||||
import { serializeVarint } from "./varint"
|
||||
import { FileContents } from "./fileContents"
|
||||
import { compare } from ".."
|
||||
|
||||
/**
 * A directory node of a MerkleArchive: a map of entry names to Entries,
 * hashed as a sorted, length-prefixed table so the digest is canonical.
 */
export class DirectoryContents {
  // On-disk header for a directory entry: where its table of contents lives
  // in the source blob and how many bytes it occupies.
  static readonly headerSize =
    8 + // position: u64 BE
    8 // size: u64 BE
  private constructor(readonly contents: { [name: string]: Entry }) {}
  /**
   * Parse a directory's table of contents from `source` and verify that the
   * recomputed sighash matches the expected one.
   *
   * @param source the whole archive blob (entries reference absolute positions)
   * @param header reader positioned at this directory's position/size fields
   * @param sighash expected blake3 hash of the directory table
   * @param maxSize largest table size (bytes) covered by the signature
   * @throws Error if the table exceeds the signed size or the hash mismatches
   */
  static async deserialize(
    source: Blob,
    header: ArrayBufferReader,
    sighash: Uint8Array,
    maxSize: bigint,
  ): Promise<DirectoryContents> {
    const position = header.nextU64()
    const size = header.nextU64()
    // Refuse to read more than the signed size (substitution / DoS guard).
    if (size > maxSize) {
      throw new Error("size is greater than signed")
    }

    // NOTE(review): Number(position + size) loses precision past 2^53 —
    // presumably fine for realistic archive sizes, but unchecked here.
    const tocReader = new ArrayBufferReader(
      await source
        .slice(Number(position), Number(position + size))
        .arrayBuffer(),
    )
    // Table layout: varint entry count, then (varstring name, Entry) pairs.
    const len = tocReader.nextVarint()
    const entries: { [name: string]: Entry } = {}
    for (let i = 0; i < len; i++) {
      const name = tocReader.nextVarstring()
      const entry = await Entry.deserialize(source, tocReader)
      entries[name] = entry
    }

    const res = new DirectoryContents(entries)

    // Verify the parsed table hashes to the expected value.
    if (!compare(res.sighash(), sighash)) {
      throw new Error("hash sum does not match")
    }

    return res
  }
  /**
   * Recompute this directory's blake3 digest. The update order below —
   * sorted names; varint count; then per entry: varint name length, name
   * bytes, entry hash, u64 BE size, trailing 0 byte — is the canonical
   * serialization and must stay byte-compatible with the producer's hashing.
   */
  sighash(): Uint8Array {
    const hasher = blake3.create({})
    const names = Object.keys(this.contents).sort()
    hasher.update(new Uint8Array(serializeVarint(names.length)))
    for (const name of names) {
      const entry = this.contents[name]
      const nameBuf = new TextEncoder().encode(name)
      hasher.update(new Uint8Array(serializeVarint(nameBuf.length)))
      hasher.update(nameBuf)
      hasher.update(new Uint8Array(entry.hash))
      const sizeBuf = new Uint8Array(8)
      new DataView(sizeBuf.buffer).setBigUint64(0, entry.size)
      hasher.update(sizeBuf)
      // NOTE(review): this constant 0 byte presumably tags the record on the
      // producer side — confirm against the Rust implementation.
      hasher.update(new Uint8Array([0]))
    }

    return hasher.digest()
  }
  /**
   * Walk `path` through nested directories.
   * @returns the Entry at the path, or null if the path is empty, any
   *          component is missing, or an intermediate entry is not a directory
   */
  getPath(path: string[]): Entry | null {
    if (path.length === 0) {
      return null
    }
    const next = this.contents[path[0]]
    const rest = path.slice(1)
    if (next === undefined) {
      return null
    }
    if (rest.length === 0) {
      return next
    }
    if (next.contents instanceof DirectoryContents) {
      return next.contents.getPath(rest)
    }
    return null
  }
}
|
||||
24
sdk/base/lib/s9pk/merkleArchive/fileContents.ts
Normal file
24
sdk/base/lib/s9pk/merkleArchive/fileContents.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { blake3 } from "@noble/hashes/blake3"
|
||||
import { ArrayBufferReader } from "."
|
||||
import { compare } from ".."
|
||||
|
||||
export class FileContents {
|
||||
private constructor(readonly contents: Blob) {}
|
||||
static deserialize(
|
||||
source: Blob,
|
||||
header: ArrayBufferReader,
|
||||
size: bigint,
|
||||
): FileContents {
|
||||
const position = header.nextU64()
|
||||
return new FileContents(
|
||||
source.slice(Number(position), Number(position + size)),
|
||||
)
|
||||
}
|
||||
async verified(hash: Uint8Array): Promise<ArrayBuffer> {
|
||||
const res = await this.contents.arrayBuffer()
|
||||
if (!compare(hash, blake3(new Uint8Array(res)))) {
|
||||
throw new Error("hash sum mismatch")
|
||||
}
|
||||
return res
|
||||
}
|
||||
}
|
||||
167
sdk/base/lib/s9pk/merkleArchive/index.ts
Normal file
167
sdk/base/lib/s9pk/merkleArchive/index.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
import { MerkleArchiveCommitment } from "../../osBindings"
|
||||
import { DirectoryContents } from "./directoryContents"
|
||||
import { FileContents } from "./fileContents"
|
||||
import { ed25519ph } from "@noble/curves/ed25519"
|
||||
import { sha512 } from "@noble/hashes/sha2"
|
||||
import { VarIntProcessor } from "./varint"
|
||||
import { compare } from ".."
|
||||
|
||||
// Decoded varstrings are clamped to 1 MiB to bound allocation when parsing
// untrusted input (see ArrayBufferReader.nextVarstring).
const maxVarstringLen = 1024 * 1024

/**
 * The signature material embedded in a MerkleArchive header: who signed the
 * root directory and the root hash/size bound the signature covers.
 */
export type Signer = {
  // ed25519 public key (32 bytes)
  pubkey: Uint8Array
  // ed25519ph signature (64 bytes) over root sighash || max size
  signature: Uint8Array
  // largest size, in bytes, the signer permits for the root directory table
  maxSize: bigint
  // domain-separation context string used during verification (e.g. "s9pk")
  context: string
}
|
||||
|
||||
export class ArrayBufferReader {
|
||||
constructor(private buffer: ArrayBuffer) {}
|
||||
next(length: number): ArrayBuffer {
|
||||
const res = this.buffer.slice(0, length)
|
||||
this.buffer = this.buffer.slice(length)
|
||||
return res
|
||||
}
|
||||
nextU64(): bigint {
|
||||
return new DataView(this.next(8)).getBigUint64(0)
|
||||
}
|
||||
nextVarint(): number {
|
||||
const p = new VarIntProcessor()
|
||||
while (!p.finished()) {
|
||||
p.push(new Uint8Array(this.buffer.slice(0, 1))[0])
|
||||
this.buffer = this.buffer.slice(1)
|
||||
}
|
||||
const res = p.decode()
|
||||
if (res === null) {
|
||||
throw new Error("Reached EOF")
|
||||
}
|
||||
return res
|
||||
}
|
||||
nextVarstring(): string {
|
||||
const len = Math.min(this.nextVarint(), maxVarstringLen)
|
||||
return new TextDecoder().decode(this.next(len))
|
||||
}
|
||||
}
|
||||
|
||||
export class MerkleArchive {
|
||||
static readonly headerSize =
|
||||
32 + // pubkey
|
||||
64 + // signature
|
||||
32 + // sighash
|
||||
8 + // size
|
||||
DirectoryContents.headerSize
|
||||
private constructor(
|
||||
readonly signer: Signer,
|
||||
readonly contents: DirectoryContents,
|
||||
) {}
|
||||
static async deserialize(
|
||||
source: Blob,
|
||||
context: string,
|
||||
header: ArrayBufferReader,
|
||||
commitment: MerkleArchiveCommitment | null,
|
||||
): Promise<MerkleArchive> {
|
||||
const pubkey = new Uint8Array(header.next(32))
|
||||
const signature = new Uint8Array(header.next(64))
|
||||
const sighash = new Uint8Array(header.next(32))
|
||||
const rootMaxSizeBytes = header.next(8)
|
||||
const maxSize = new DataView(rootMaxSizeBytes).getBigUint64(0)
|
||||
|
||||
if (
|
||||
!ed25519ph.verify(
|
||||
signature,
|
||||
new Uint8Array(
|
||||
await new Blob([sighash, rootMaxSizeBytes]).arrayBuffer(),
|
||||
),
|
||||
pubkey,
|
||||
{
|
||||
context: new TextEncoder().encode(context),
|
||||
zip215: true,
|
||||
},
|
||||
)
|
||||
) {
|
||||
throw new Error("signature verification failed")
|
||||
}
|
||||
|
||||
if (commitment) {
|
||||
if (
|
||||
!compare(
|
||||
sighash,
|
||||
new Uint8Array(Buffer.from(commitment.rootSighash, "base64").buffer),
|
||||
)
|
||||
) {
|
||||
throw new Error("merkle root mismatch")
|
||||
}
|
||||
if (maxSize > commitment.rootMaxsize) {
|
||||
throw new Error("root directory max size too large")
|
||||
}
|
||||
} else if (maxSize > 1024 * 1024) {
|
||||
throw new Error(
|
||||
"root directory max size over 1MiB, cancelling download in case of DOS attack",
|
||||
)
|
||||
}
|
||||
|
||||
const contents = await DirectoryContents.deserialize(
|
||||
source,
|
||||
header,
|
||||
sighash,
|
||||
maxSize,
|
||||
)
|
||||
|
||||
return new MerkleArchive(
|
||||
{
|
||||
pubkey,
|
||||
signature,
|
||||
maxSize,
|
||||
context,
|
||||
},
|
||||
contents,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export class Entry {
|
||||
private constructor(
|
||||
readonly hash: Uint8Array,
|
||||
readonly size: bigint,
|
||||
readonly contents: EntryContents,
|
||||
) {}
|
||||
static async deserialize(
|
||||
source: Blob,
|
||||
header: ArrayBufferReader,
|
||||
): Promise<Entry> {
|
||||
const hash = new Uint8Array(header.next(32))
|
||||
const size = header.nextU64()
|
||||
const contents = await deserializeEntryContents(source, header, hash, size)
|
||||
|
||||
return new Entry(new Uint8Array(hash), size, contents)
|
||||
}
|
||||
async verifiedFileContents(): Promise<ArrayBuffer> {
|
||||
if (!this.contents) {
|
||||
throw new Error("file is missing from archive")
|
||||
}
|
||||
if (!(this.contents instanceof FileContents)) {
|
||||
throw new Error("is not a regular file")
|
||||
}
|
||||
return this.contents.verified(this.hash)
|
||||
}
|
||||
}
|
||||
|
||||
export type EntryContents = null | FileContents | DirectoryContents
|
||||
async function deserializeEntryContents(
|
||||
source: Blob,
|
||||
header: ArrayBufferReader,
|
||||
hash: Uint8Array,
|
||||
size: bigint,
|
||||
): Promise<EntryContents> {
|
||||
const typeId = new Uint8Array(header.next(1))[0]
|
||||
switch (typeId) {
|
||||
case 0:
|
||||
return null
|
||||
case 1:
|
||||
return FileContents.deserialize(source, header, size)
|
||||
case 2:
|
||||
return DirectoryContents.deserialize(source, header, hash, size)
|
||||
default:
|
||||
throw new Error(`Unknown type id ${typeId} found in MerkleArchive`)
|
||||
}
|
||||
}
|
||||
64
sdk/base/lib/s9pk/merkleArchive/varint.ts
Normal file
64
sdk/base/lib/s9pk/merkleArchive/varint.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { asError } from "../../util"
|
||||
|
||||
const msb = 0x80
|
||||
const dropMsb = 0x7f
|
||||
const maxSize = Math.floor((8 * 8 + 7) / 7)
|
||||
|
||||
export class VarIntProcessor {
|
||||
private buf: Uint8Array
|
||||
private i: number
|
||||
constructor() {
|
||||
this.buf = new Uint8Array(maxSize)
|
||||
this.i = 0
|
||||
}
|
||||
push(b: number) {
|
||||
if (this.i >= maxSize) {
|
||||
throw new Error("Unterminated varint")
|
||||
}
|
||||
this.buf[this.i] = b
|
||||
this.i += 1
|
||||
}
|
||||
finished(): boolean {
|
||||
return this.i > 0 && (this.buf[this.i - 1] & msb) === 0
|
||||
}
|
||||
decode(): number | null {
|
||||
let result = 0
|
||||
let shift = 0
|
||||
let success = false
|
||||
for (let i = 0; i < this.i; i++) {
|
||||
const b = this.buf[i]
|
||||
const msbDropped = b & dropMsb
|
||||
result |= msbDropped << shift
|
||||
shift += 7
|
||||
|
||||
if ((b & msb) == 0 || shift > 9 * 7) {
|
||||
success = (b & msb) === 0
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (success) {
|
||||
return result
|
||||
} else {
|
||||
console.error(asError(this.buf))
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function serializeVarint(int: number): ArrayBuffer {
|
||||
const buf = new Uint8Array(maxSize)
|
||||
let n = int
|
||||
let i = 0
|
||||
|
||||
while (n >= msb) {
|
||||
buf[i] = msb | n
|
||||
i += 1
|
||||
n >>= 7
|
||||
}
|
||||
|
||||
buf[i] = n
|
||||
i += 1
|
||||
|
||||
return buf.slice(0, i).buffer
|
||||
}
|
||||
Reference in New Issue
Block a user