Compare commits

..

1 Commit

Author SHA1 Message Date
Matt Hill
3d49689b1e remove unnecessary warnings 2026-03-31 13:21:37 -06:00
43 changed files with 1128 additions and 1238 deletions

View File

@@ -83,16 +83,11 @@ if [ ${#DEB_FILES[@]} -eq 0 ]; then
exit 1
fi
# Copy each deb to the pool, removing old versions of the same package+arch
# Copy each deb to the pool, renaming to standard format
for deb in "${DEB_FILES[@]}"; do
PKG_NAME="$(dpkg-deb --field "$deb" Package)"
PKG_ARCH="$(dpkg-deb --field "$deb" Architecture)"
POOL_DIR="$REPO_DIR/pool/${COMPONENT}/${PKG_NAME:0:1}/${PKG_NAME}"
mkdir -p "$POOL_DIR"
# Remove old versions for the same architecture
for old in "$POOL_DIR"/${PKG_NAME}_*_${PKG_ARCH}.deb; do
[ -f "$old" ] && rm -v "$old"
done
cp "$deb" "$POOL_DIR/"
dpkg-name -o "$POOL_DIR/$(basename "$deb")" 2>/dev/null || true
echo "Added: $(basename "$deb") -> pool/${COMPONENT}/${PKG_NAME:0:1}/${PKG_NAME}/"

View File

@@ -8,7 +8,6 @@ S3_BUCKET="s3://startos-images"
S3_CDN="https://startos-images.nyc3.cdn.digitaloceanspaces.com"
START9_GPG_KEY="2D63C217"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ARCHES="aarch64 aarch64-nonfree aarch64-nvidia riscv64 riscv64-nonfree x86_64 x86_64-nonfree x86_64-nvidia"
CLI_ARCHES="aarch64 riscv64 x86_64"
@@ -84,21 +83,16 @@ resolve_gh_user() {
GH_GPG_KEY=$(git config user.signingkey 2>/dev/null || true)
}
# Fetch the URL for an OS asset from the registry index.
# The index is fetched once per script run and cached in _REGISTRY_INDEX.
# Globals:   REGISTRY (read), VERSION (read), _REGISTRY_INDEX (read/write cache)
# Arguments: $1 - asset extension (iso|squashfs|img)
#            $2 - platform string (e.g. x86_64-nonfree)
# Outputs:   first download URL for the asset, on stdout
# Usage: registry_url <iso|squashfs|img> <platform>
registry_url() {
local ext=$1 platform=$2
if [ -z "${_REGISTRY_INDEX:-}" ]; then
# Quote $REGISTRY so a URL containing shell metacharacters survives intact.
_REGISTRY_INDEX=$(start-cli --registry="$REGISTRY" registry os index)
fi
# Pass values via --arg rather than interpolating them into the jq program:
# a quote in $VERSION/$ext/$platform would otherwise break (or inject into)
# the filter. [$e]/[$p] index lookup is equivalent to the original .$ext["..."].
printf '%s' "$_REGISTRY_INDEX" \
| jq -r --arg v "$VERSION" --arg e "$ext" --arg p "$platform" \
'.versions[$v][$e][$p].urls[0]'
}
# --- Subcommands ---
cmd_download() {
require_version
if [ -z "${RUN_ID:-}" ]; then
read -rp "RUN_ID (OS images, leave blank to skip): " RUN_ID
fi
RUN_ID=$(parse_run_id "${RUN_ID:-}")
if [ -z "${ST_RUN_ID:-}" ]; then
read -rp "ST_RUN_ID (start-tunnel, leave blank to skip): " ST_RUN_ID
fi
@@ -111,14 +105,14 @@ cmd_download() {
ensure_release_dir
# Download OS images from registry (deployed by GitHub workflow)
echo "Downloading OS images from registry..."
for arch in $ARCHES; do
for ext in squashfs iso; do
echo " $ext $arch"
start-cli --registry=$REGISTRY registry os asset get "$ext" "$VERSION" "$arch" -d "$(pwd)"
if [ -n "$RUN_ID" ]; then
for arch in $ARCHES; do
while ! gh run download -R $REPO "$RUN_ID" -n "$arch.squashfs" -D "$(pwd)"; do sleep 1; done
done
done
for arch in $ARCHES; do
while ! gh run download -R $REPO "$RUN_ID" -n "$arch.iso" -D "$(pwd)"; do sleep 1; done
done
fi
if [ -n "$ST_RUN_ID" ]; then
for arch in $CLI_ARCHES; do
@@ -149,12 +143,19 @@ cmd_pull() {
gh release download -R $REPO "v$VERSION" -p "$file" -D "$(pwd)" --clobber
done
# Download ISOs and squashfs from registry
echo "Downloading OS images from registry..."
# Download ISOs and squashfs from S3 CDN
for arch in $ARCHES; do
for ext in squashfs iso; do
echo " $ext $arch"
start-cli --registry=$REGISTRY registry os asset get "$ext" "$VERSION" "$arch" -d "$(pwd)"
# Get the actual filename from the GH release asset list or body
local filename
filename=$(gh release view -R $REPO "v$VERSION" --json assets -q ".assets[].name" | grep "_${arch}\\.${ext}$" || true)
if [ -z "$filename" ]; then
filename=$(gh release view -R $REPO "v$VERSION" --json body -q .body | grep -oP "[^ ]*_${arch}\\.${ext}" | head -1 || true)
fi
if [ -n "$filename" ]; then
echo "Downloading $filename from S3..."
curl -fSL -o "$filename" "$S3_CDN/v$VERSION/$filename"
fi
done
done
}
@@ -169,12 +170,14 @@ cmd_upload() {
require_version
enter_release_dir
# OS images (iso/squashfs) are already on S3 via the GitHub workflow.
# Upload only debs and CLI binaries to the GitHub Release.
for file in $(release_files); do
case "$file" in
*.iso|*.squashfs) ;;
*) gh release upload -R $REPO "v$VERSION" "$file" ;;
*.iso|*.squashfs)
s3cmd put -P "$file" "$S3_BUCKET/v$VERSION/$file"
;;
*)
gh release upload -R $REPO "v$VERSION" "$file"
;;
esac
done
}
@@ -245,24 +248,6 @@ cmd_cosign() {
echo "Done. Personal signatures for $GH_USER added to v$VERSION."
}
# Publish all start-tunnel .deb artifacts from the release directory to the
# apt repository via the publish-deb helper script. Exits non-zero if none
# are present.
cmd_publish_tunnel() {
require_version
enter_release_dir
# Collect every matching .deb; the -f test filters out the literal glob
# pattern left behind when nothing matches.
local debs=()
local candidate
for candidate in start-tunnel*.deb; do
if [ -f "$candidate" ]; then
debs+=("$candidate")
fi
done
if [ ${#debs[@]} -eq 0 ]; then
>&2 echo "No start-tunnel .deb files found in release directory"
exit 1
fi
echo "Publishing start-tunnel debs to apt repository..."
"$SCRIPT_DIR/apt/publish-deb.sh" "${debs[@]}"
}
cmd_notes() {
require_version
enter_release_dir
@@ -270,14 +255,14 @@ cmd_notes() {
cat << EOF
# ISO Downloads
- [x86_64/AMD64]($(registry_url iso x86_64-nonfree))
- [x86_64/AMD64 + NVIDIA]($(registry_url iso x86_64-nvidia))
- [x86_64/AMD64-slim (FOSS-only)]($(registry_url iso x86_64) "Without proprietary software or drivers")
- [aarch64/ARM64]($(registry_url iso aarch64-nonfree))
- [aarch64/ARM64 + NVIDIA]($(registry_url iso aarch64-nvidia))
- [aarch64/ARM64-slim (FOSS-Only)]($(registry_url iso aarch64) "Without proprietary software or drivers")
- [RISCV64 (RVA23)]($(registry_url iso riscv64-nonfree))
- [RISCV64 (RVA23)-slim (FOSS-only)]($(registry_url iso riscv64) "Without proprietary software or drivers")
- [x86_64/AMD64]($S3_CDN/v$VERSION/$(ls *_x86_64-nonfree.iso))
- [x86_64/AMD64 + NVIDIA]($S3_CDN/v$VERSION/$(ls *_x86_64-nvidia.iso))
- [x86_64/AMD64-slim (FOSS-only)]($S3_CDN/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers")
- [aarch64/ARM64]($S3_CDN/v$VERSION/$(ls *_aarch64-nonfree.iso))
- [aarch64/ARM64 + NVIDIA]($S3_CDN/v$VERSION/$(ls *_aarch64-nvidia.iso))
- [aarch64/ARM64-slim (FOSS-Only)]($S3_CDN/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers")
- [RISCV64 (RVA23)]($S3_CDN/v$VERSION/$(ls *_riscv64-nonfree.iso))
- [RISCV64 (RVA23)-slim (FOSS-only)]($S3_CDN/v$VERSION/$(ls *_riscv64.iso) "Without proprietary software or drivers")
EOF
cat << 'EOF'
@@ -333,8 +318,9 @@ EOF
# Run the entire release pipeline end-to-end, in dependency order.
# NOTE(review): this span comes from a diff hunk with +/- markers stripped;
# cmd_publish_tunnel appears to be removed and cmd_index added elsewhere in
# this change — confirm which steps belong in the final pipeline.
cmd_full_release() {
cmd_download
cmd_register
cmd_upload
cmd_publish_tunnel
cmd_index
cmd_sign
cmd_notes
}
@@ -344,23 +330,22 @@ usage() {
Usage: manage-release.sh <subcommand>
Subcommands:
download Download OS images from registry + other artifacts from GH Actions
OS images are pulled via start-cli from the registry (deployed by GH workflow)
Requires: ST_RUN_ID, CLI_RUN_ID (any combination)
pull Download an existing release from the GH tag and S3
register Register the version in the Start9 registry
upload Upload artifacts to GitHub Releases and S3
index Add assets to the registry index
publish-tunnel Publish start-tunnel .deb files to the apt repository
sign Sign all artifacts with Start9 org key (+ personal key if available)
and upload signatures.tar.gz
cosign Add personal GPG signature to an existing release's signatures
(requires 'pull' first so you can verify assets before signing)
notes Print release notes with download links and checksums
full-release Run: download → register → upload → publish-tunnel → sign → notes
download Download artifacts from GitHub Actions runs
Requires: RUN_ID, ST_RUN_ID, CLI_RUN_ID (any combination)
pull Download an existing release from the GH tag and S3
register Register the version in the Start9 registry
upload Upload artifacts to GitHub Releases and S3
index Add assets to the registry index
sign Sign all artifacts with Start9 org key (+ personal key if available)
and upload signatures.tar.gz
cosign Add personal GPG signature to an existing release's signatures
(requires 'pull' first so you can verify assets before signing)
notes Print release notes with download links and checksums
full-release Run: download → register → upload → index → sign → notes
Environment variables:
VERSION (required) Release version
RUN_ID GitHub Actions run ID for OS images (download subcommand)
ST_RUN_ID GitHub Actions run ID for start-tunnel (download subcommand)
CLI_RUN_ID GitHub Actions run ID for start-cli (download subcommand)
GH_USER Override GitHub username (default: autodetected via gh cli)
@@ -369,15 +354,14 @@ EOF
}
case "${1:-}" in
download) cmd_download ;;
pull) cmd_pull ;;
register) cmd_register ;;
upload) cmd_upload ;;
index) cmd_index ;;
publish-tunnel) cmd_publish_tunnel ;;
sign) cmd_sign ;;
cosign) cmd_cosign ;;
notes) cmd_notes ;;
full-release) cmd_full_release ;;
*) usage; exit 1 ;;
download) cmd_download ;;
pull) cmd_pull ;;
register) cmd_register ;;
upload) cmd_upload ;;
index) cmd_index ;;
sign) cmd_sign ;;
cosign) cmd_cosign ;;
notes) cmd_notes ;;
full-release) cmd_full_release ;;
*) usage; exit 1 ;;
esac

View File

@@ -37,7 +37,7 @@
},
"../sdk/dist": {
"name": "@start9labs/start-sdk",
"version": "1.0.0",
"version": "0.4.0-beta.66",
"license": "MIT",
"dependencies": {
"@iarna/toml": "^3.0.0",

View File

@@ -396,12 +396,6 @@ export class SystemForEmbassy implements System {
if (this.manifest.id === "nostr") {
this.manifest.id = "nostr-rs-relay"
}
if (this.manifest.id === "ghost") {
this.manifest.id = "ghost-legacy"
}
if (this.manifest.id === "synapse") {
this.manifest.id = "synapse-legacy"
}
}
async init(

40
core/Cargo.lock generated
View File

@@ -1967,18 +1967,6 @@ dependencies = [
"once_cell",
]
[[package]]
name = "fallible-iterator"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -3574,17 +3562,6 @@ dependencies = [
"redox_syscall 0.7.3",
]
[[package]]
name = "libsqlite3-sys"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbb8270bb4060bd76c6e96f20c52d80620f1d82a3470885694e41e0f81ef6fe7"
dependencies = [
"cc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "libyml"
version = "0.0.5"
@@ -5467,20 +5444,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "rusqlite"
version = "0.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37e34486da88d8e051c7c0e23c3f15fd806ea8546260aa2fec247e97242ec143"
dependencies = [
"bitflags 2.11.0",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
"libsqlite3-sys",
"smallvec",
]
[[package]]
name = "rust-argon2"
version = "3.0.0"
@@ -6476,7 +6439,7 @@ dependencies = [
[[package]]
name = "start-os"
version = "0.4.0-beta.0"
version = "0.4.0-alpha.23"
dependencies = [
"aes",
"async-acme",
@@ -6573,7 +6536,6 @@ dependencies = [
"reqwest_cookie_store",
"rpassword",
"rpc-toolkit",
"rusqlite",
"rust-argon2",
"rust-i18n",
"semver",

View File

@@ -15,7 +15,7 @@ license = "MIT"
name = "start-os"
readme = "README.md"
repository = "https://github.com/Start9Labs/start-os"
version = "0.4.0-beta.0" # VERSION_BUMP
version = "0.4.0-alpha.23" # VERSION_BUMP
[lib]
name = "startos"
@@ -182,7 +182,6 @@ qrcode = "0.14.1"
r3bl_tui = "0.7.6"
rand = "0.9.2"
regex = "1.10.2"
rusqlite = { version = "0.34", features = ["bundled"] }
reqwest = { version = "0.12.25", features = [
"http2",
"json",
@@ -251,3 +250,5 @@ opt-level = 3
[profile.dev.package.backtrace]
opt-level = 3
[profile.dev.package.sqlx-macros]
opt-level = 3

View File

@@ -1826,21 +1826,6 @@ registry.os.version.signer-not-authorized:
fr_FR: "Le signataire %{signer} n'est pas autorisé à signer pour v%{version}"
pl_PL: "Sygnatariusz %{signer} nie jest autoryzowany do podpisywania v%{version}"
# registry/os/promote.rs
registry.os.promote.need-from-or-to:
en_US: "At least one of --from or --to must be specified"
de_DE: "Mindestens --from oder --to muss angegeben werden"
es_ES: "Se debe especificar al menos --from o --to"
fr_FR: "Au moins --from ou --to doit être spécifié"
pl_PL: "Należy podać przynajmniej --from lub --to"
registry.os.promote.version-not-found:
en_US: "OS version %{version} not found on source registry"
de_DE: "OS-Version %{version} nicht in der Quell-Registry gefunden"
es_ES: "Versión del SO %{version} no encontrada en el registro de origen"
fr_FR: "Version OS %{version} introuvable dans le registre source"
pl_PL: "Wersja OS %{version} nie znaleziona w rejestrze źródłowym"
# registry/package/mod.rs
registry.package.remove-not-exist:
en_US: "%{id}@%{version}%{sighash} does not exist, so not removed"
@@ -1885,20 +1870,6 @@ registry.package.add-mirror.unauthorized:
fr_FR: "Non autorisé"
pl_PL: "Brak autoryzacji"
registry.package.promote.need-from-or-to:
en_US: "At least one of --from or --to must be specified"
de_DE: "Mindestens --from oder --to muss angegeben werden"
es_ES: "Se debe especificar al menos --from o --to"
fr_FR: "Au moins --from ou --to doit être spécifié"
pl_PL: "Należy podać przynajmniej --from lub --to"
registry.package.promote.version-not-found:
en_US: "Version %{version} of %{id} not found on source registry"
de_DE: "Version %{version} von %{id} nicht in der Quell-Registry gefunden"
es_ES: "Versión %{version} de %{id} no encontrada en el registro de origen"
fr_FR: "Version %{version} de %{id} introuvable dans le registre source"
pl_PL: "Wersja %{version} pakietu %{id} nie znaleziona w rejestrze źródłowym"
registry.package.cannot-remove-last-mirror:
en_US: "Cannot remove last mirror from an s9pk"
de_DE: "Letzter Spiegel kann nicht aus einem s9pk entfernt werden"
@@ -2931,13 +2902,6 @@ help.arg.force-uninstall:
fr_FR: "Ignorer les erreurs dans le script de désinitialisation du service"
pl_PL: "Ignoruj błędy w skrypcie deinicjalizacji usługi"
help.arg.from-registry-url:
en_US: "Source registry URL to promote from"
de_DE: "Quell-Registry-URL zum Heraufstufen"
es_ES: "URL del registro de origen para promover"
fr_FR: "URL du registre source pour la promotion"
pl_PL: "URL rejestru źródłowego do promowania"
help.arg.host-url:
en_US: "URL of the StartOS server"
de_DE: "URL des StartOS-Servers"
@@ -2959,13 +2923,6 @@ help.arg.icon-path:
fr_FR: "Chemin vers le fichier d'icône du service"
pl_PL: "Ścieżka do pliku ikony usługi"
help.arg.icon-source:
en_US: "Icon source: file path, file:// URL, http(s):// URL, or data: URL"
de_DE: "Icon-Quelle: Dateipfad, file://-URL, http(s)://-URL oder data:-URL"
es_ES: "Fuente del icono: ruta de archivo, URL file://, URL http(s):// o URL data:"
fr_FR: "Source de l'icône : chemin de fichier, URL file://, URL http(s):// ou URL data:"
pl_PL: "Źródło ikony: ścieżka pliku, URL file://, URL http(s):// lub URL data:"
help.arg.image-id:
en_US: "Docker image identifier"
de_DE: "Docker-Image-Kennung"
@@ -3050,34 +3007,6 @@ help.arg.merge:
fr_FR: "Fusionner avec la plage de versions existante au lieu de remplacer"
pl_PL: "Połącz z istniejącym zakresem wersji zamiast zastępować"
help.arg.metrics-after:
en_US: "Start of time range (RFC 3339 timestamp)"
de_DE: "Beginn des Zeitraums (RFC 3339 Zeitstempel)"
es_ES: "Inicio del rango de tiempo (marca de tiempo RFC 3339)"
fr_FR: "Début de la plage temporelle (horodatage RFC 3339)"
pl_PL: "Początek zakresu czasu (znacznik czasu RFC 3339)"
help.arg.metrics-before:
en_US: "End of time range (RFC 3339 timestamp)"
de_DE: "Ende des Zeitraums (RFC 3339 Zeitstempel)"
es_ES: "Fin del rango de tiempo (marca de tiempo RFC 3339)"
fr_FR: "Fin de la plage temporelle (horodatage RFC 3339)"
pl_PL: "Koniec zakresu czasu (znacznik czasu RFC 3339)"
help.arg.metrics-pkg-id:
en_US: "Filter by package ID"
de_DE: "Nach Paket-ID filtern"
es_ES: "Filtrar por ID de paquete"
fr_FR: "Filtrer par identifiant de paquet"
pl_PL: "Filtruj według identyfikatora pakietu"
help.arg.metrics-version:
en_US: "Filter by version"
de_DE: "Nach Version filtern"
es_ES: "Filtrar por versión"
fr_FR: "Filtrer par version"
pl_PL: "Filtruj według wersji"
help.arg.mirror-url:
en_US: "URL of the mirror"
de_DE: "URL des Spiegels"
@@ -3197,6 +3126,13 @@ help.arg.port:
fr_FR: "Numéro de port"
pl_PL: "Numer portu"
help.arg.postgres-connection-url:
en_US: "PostgreSQL connection URL"
de_DE: "PostgreSQL-Verbindungs-URL"
es_ES: "URL de conexión PostgreSQL"
fr_FR: "URL de connexion PostgreSQL"
pl_PL: "URL połączenia PostgreSQL"
help.arg.proxy-url:
en_US: "HTTP/SOCKS proxy URL"
de_DE: "HTTP/SOCKS-Proxy-URL"
@@ -3421,13 +3357,6 @@ help.arg.target-version-range:
fr_FR: "Contrainte de plage de version cible"
pl_PL: "Ograniczenie zakresu wersji docelowej"
help.arg.to-registry-url:
en_US: "Destination registry URL to promote to"
de_DE: "Ziel-Registry-URL zum Heraufstufen"
es_ES: "URL del registro de destino para promover"
fr_FR: "URL du registre de destination pour la promotion"
pl_PL: "URL rejestru docelowego do promowania"
help.arg.tor-proxy-url:
en_US: "Tor SOCKS proxy URL"
de_DE: "Tor-SOCKS-Proxy-URL"
@@ -4528,13 +4457,6 @@ about.commands-registry-db:
fr_FR: "Commandes pour interagir avec la base de données, comme dump et apply"
pl_PL: "Polecenia interakcji z bazą danych, np. dump i apply"
about.commands-registry-metrics:
en_US: "Query registry usage metrics"
de_DE: "Registry-Nutzungsmetriken abfragen"
es_ES: "Consultar métricas de uso del registro"
fr_FR: "Consulter les métriques d'utilisation du registre"
pl_PL: "Zapytaj o metryki użycia rejestru"
about.commands-registry-info:
en_US: "View or edit registry information"
de_DE: "Registry-Informationen anzeigen oder bearbeiten"
@@ -4976,27 +4898,6 @@ about.get-listen-address-for-webserver:
fr_FR: "Obtenir l'adresse d'écoute du serveur web"
pl_PL: "Pobierz adres nasłuchiwania serwera internetowego"
about.get-metrics-downloads:
en_US: "Count package download requests with optional filters"
de_DE: "Paket-Download-Anfragen mit optionalen Filtern zählen"
es_ES: "Contar solicitudes de descarga de paquetes con filtros opcionales"
fr_FR: "Compter les demandes de téléchargement de paquets avec filtres optionnels"
pl_PL: "Zlicz żądania pobrania pakietów z opcjonalnymi filtrami"
about.get-metrics-summary:
en_US: "Get a summary of registry usage metrics"
de_DE: "Zusammenfassung der Registry-Nutzungsmetriken abrufen"
es_ES: "Obtener un resumen de las métricas de uso del registro"
fr_FR: "Obtenir un résumé des métriques d'utilisation du registre"
pl_PL: "Pobierz podsumowanie metryk użycia rejestru"
about.get-metrics-users:
en_US: "Count unique active users within a time range"
de_DE: "Eindeutige aktive Benutzer in einem Zeitraum zählen"
es_ES: "Contar usuarios activos únicos dentro de un rango de tiempo"
fr_FR: "Compter les utilisateurs actifs uniques dans un intervalle de temps"
pl_PL: "Zlicz unikalnych aktywnych użytkowników w zakresie czasu"
about.get-os-versions-info:
en_US: "Get OS versions info"
de_DE: "Informationen zu Betriebssystemversionen abrufen"
@@ -5333,20 +5234,6 @@ about.persist-new-notification:
fr_FR: "Persister une nouvelle notification"
pl_PL: "Utrwal nowe powiadomienie"
about.promote-os-registry:
en_US: "Promote an OS version from one registry to another"
de_DE: "Eine OS-Version von einer Registry in eine andere heraufstufen"
es_ES: "Promover una versión del SO de un registro a otro"
fr_FR: "Promouvoir une version OS d'un registre à un autre"
pl_PL: "Promuj wersję OS z jednego rejestru do drugiego"
about.promote-package-registry:
en_US: "Promote a package from one registry to another"
de_DE: "Ein Paket von einer Registry in eine andere heraufstufen"
es_ES: "Promover un paquete de un registro a otro"
fr_FR: "Promouvoir un paquet d'un registre à un autre"
pl_PL: "Promuj pakiet z jednego rejestru do drugiego"
about.publish-s9pk:
en_US: "Publish s9pk to S3 bucket and index on registry"
de_DE: "S9pk in S3-Bucket veröffentlichen und in Registry indizieren"

View File

@@ -55,6 +55,8 @@ impl Public {
) -> Result<Self, Error> {
Ok(Self {
server_info: ServerInfo {
arch: get_arch(),
platform: get_platform(),
id: account.server_id.clone(),
version: Current::default().semver(),
name: account.hostname.name.clone(),
@@ -158,6 +160,14 @@ impl Public {
}
}
fn get_arch() -> InternedString {
(*ARCH).into()
}
fn get_platform() -> InternedString {
(&*PLATFORM).into()
}
pub fn default_echoip_urls() -> Vec<Url> {
vec![
"https://ipconfig.io".parse().unwrap(),
@@ -170,6 +180,10 @@ pub fn default_echoip_urls() -> Vec<Url> {
#[model = "Model<Self>"]
#[ts(export)]
pub struct ServerInfo {
#[serde(default = "get_arch")]
pub arch: InternedString,
#[serde(default = "get_platform")]
pub platform: InternedString,
pub id: String,
pub name: InternedString,
pub hostname: InternedString,

View File

@@ -2,11 +2,24 @@ use std::io::Cursor;
use std::path::Path;
use tokio::process::Command;
use tracing::instrument;
use crate::Error;
use crate::disk::fsck::RequiresReboot;
use crate::util::Invoke;
#[instrument(skip_all)]
pub async fn btrfs_check_readonly(logicalname: impl AsRef<Path>) -> Result<RequiresReboot, Error> {
Command::new("btrfs")
.arg("check")
.arg("--readonly")
.arg(logicalname.as_ref())
.invoke(crate::ErrorKind::DiskManagement)
.await?;
Ok(RequiresReboot(false))
}
pub async fn btrfs_check_repair(logicalname: impl AsRef<Path>) -> Result<RequiresReboot, Error> {
Command::new("btrfs")
.arg("check")

View File

@@ -4,7 +4,7 @@ use color_eyre::eyre::eyre;
use tokio::process::Command;
use crate::Error;
use crate::disk::fsck::btrfs::btrfs_check_repair;
use crate::disk::fsck::btrfs::{btrfs_check_readonly, btrfs_check_repair};
use crate::disk::fsck::ext4::{e2fsck_aggressive, e2fsck_preen};
use crate::util::Invoke;
@@ -71,7 +71,7 @@ impl RepairStrategy {
logicalname: impl AsRef<Path> + std::fmt::Debug,
) -> Result<RequiresReboot, Error> {
match self {
RepairStrategy::Preen => Ok(RequiresReboot(false)),
RepairStrategy::Preen => btrfs_check_readonly(logicalname).await,
RepairStrategy::Aggressive => btrfs_check_repair(logicalname).await,
}
}

View File

@@ -14,8 +14,8 @@ use patch_db::json_ptr::ROOT;
use reqwest::{Client, Proxy};
use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{CallRemote, Context, Empty, RpcRequest};
use rusqlite::Connection;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use tokio::sync::broadcast::Sender;
use tracing::instrument;
use ts_rs::TS;
@@ -34,7 +34,6 @@ use crate::registry::signer::SignerInfo;
use crate::rpc_continuations::RpcContinuations;
use crate::sign::AnyVerifyingKey;
use crate::util::io::{append_file, read_file_to_string};
use crate::util::sync::SyncMutex;
const DEFAULT_REGISTRY_LISTEN: SocketAddr =
SocketAddr::new(std::net::IpAddr::V4(Ipv4Addr::LOCALHOST), 5959);
@@ -58,6 +57,12 @@ pub struct RegistryConfig {
pub tor_proxy: Option<Url>,
#[arg(short = 'd', long = "datadir", help = "help.arg.data-directory")]
pub datadir: Option<PathBuf>,
#[arg(
short = 'u',
long = "pg-connection-url",
help = "help.arg.postgres-connection-url"
)]
pub pg_connection_url: Option<String>,
}
impl ContextConfig for RegistryConfig {
fn next(&mut self) -> Option<PathBuf> {
@@ -88,7 +93,7 @@ pub struct RegistryContextSeed {
pub rpc_continuations: RpcContinuations,
pub client: Client,
pub shutdown: Sender<()>,
pub metrics_db: SyncMutex<Connection>,
pub pool: Option<PgPool>,
}
#[derive(Clone)]
@@ -119,29 +124,13 @@ impl RegistryContext {
.clone()
.map(Ok)
.unwrap_or_else(|| "socks5h://tor.startos:9050".parse())?;
let metrics_db_path = datadir.join("metrics.db");
let metrics_db = Connection::open(&metrics_db_path).with_kind(ErrorKind::Database)?;
metrics_db
.execute_batch(
"CREATE TABLE IF NOT EXISTS user_activity (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TEXT NOT NULL,
server_id TEXT NOT NULL,
arch TEXT,
os_version TEXT
);
CREATE TABLE IF NOT EXISTS package_request (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TEXT NOT NULL,
pkg_id TEXT NOT NULL,
version TEXT
);
CREATE INDEX IF NOT EXISTS idx_user_activity_created_at ON user_activity(created_at);
CREATE INDEX IF NOT EXISTS idx_package_request_created_at ON package_request(created_at);
CREATE INDEX IF NOT EXISTS idx_package_request_pkg_id ON package_request(pkg_id);",
)
.with_kind(ErrorKind::Database)?;
let metrics_db = SyncMutex::new(metrics_db);
let pool: Option<PgPool> = match &config.pg_connection_url {
Some(url) => match PgPool::connect(url.as_str()).await {
Ok(pool) => Some(pool),
Err(_) => None,
},
None => None,
};
if config.registry_hostname.is_empty() {
return Err(Error::new(
eyre!("{}", t!("registry.context.missing-hostname")),
@@ -165,7 +154,7 @@ impl RegistryContext {
.build()
.with_kind(crate::ErrorKind::ParseUrl)?,
shutdown,
metrics_db,
pool,
})))
}
}

View File

@@ -233,7 +233,7 @@ impl HardwareInfo {
pub async fn load(ctx: &RpcContext) -> Result<Self, Error> {
let s = ctx.db.peek().await.into_public().into_server_info();
Ok(Self {
arch: InternedString::intern(&*crate::ARCH),
arch: s.as_arch().de()?,
ram: s.as_ram().de()?,
devices: Some(s.as_devices().de()?),
})

View File

@@ -1,4 +1,5 @@
use std::collections::BTreeMap;
use std::path::PathBuf;
use clap::Parser;
use imbl_value::InternedString;
@@ -106,8 +107,8 @@ pub async fn set_icon(
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CliSetIconParams {
#[arg(help = "help.arg.icon-source")]
pub icon: String,
#[arg(help = "help.arg.icon-path")]
pub icon: PathBuf,
}
pub async fn cli_set_icon(
@@ -119,23 +120,7 @@ pub async fn cli_set_icon(
..
}: HandlerArgs<CliContext, CliSetIconParams>,
) -> Result<(), Error> {
let data_url = if icon.starts_with("data:") {
icon.parse::<DataUrl<'static>>()
.with_kind(ErrorKind::ParseUrl)?
} else if icon.starts_with("https://") || icon.starts_with("http://") {
let res = ctx
.client
.get(&icon)
.send()
.await
.with_kind(ErrorKind::Network)?;
DataUrl::from_response(res).await?
} else {
let path = icon
.strip_prefix("file://")
.unwrap_or(&icon);
DataUrl::from_path(path).await?
};
let data_url = DataUrl::from_path(icon).await?;
ctx.call_remote::<RegistryContext>(
&parent_method.into_iter().chain(method).join("."),
imbl_value::json!({

View File

@@ -0,0 +1,25 @@
#!/bin/bash
# Generate sqlx offline query metadata against a throwaway Postgres container.
# Requires: docker, sudo, cargo sqlx. Safe to run from any cwd — it cd's to
# its own directory first.
cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1
TMP_DIR=$(mktemp -d) || exit 1
mkdir "$TMP_DIR/pgdata"
docker run -d --rm --name=tmp_postgres -e POSTGRES_PASSWORD=password -v "$TMP_DIR/pgdata":/var/lib/postgresql/data postgres
(
set -e
ctr=0
# Wait (up to ~25s) for the server to start accepting connections.
until docker exec tmp_postgres psql -U postgres 2> /dev/null || [ "$ctr" -ge 5 ]; do
  ctr=$((ctr + 1)) # POSIX arithmetic; the original $[ctr + 1] form is deprecated
  sleep 5
done
PG_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' tmp_postgres)
# Feed the schema straight into psql's stdin (no useless cat).
docker exec -i tmp_postgres psql -U postgres -d postgres -f- < "./registry_schema.sql"
cd ../../..
DATABASE_URL=postgres://postgres:password@$PG_IP/postgres PLATFORM=$(uname -m) cargo sqlx prepare -- --lib --profile=test --workspace
echo "Subscript Complete"
)
docker stop tmp_postgres
# ${TMP_DIR:?} aborts instead of expanding empty, guarding the recursive rm.
sudo rm -rf -- "${TMP_DIR:?}"

View File

@@ -0,0 +1,828 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 14.12 (Ubuntu 14.12-0ubuntu0.22.04.1)
-- Dumped by pg_dump version 14.12 (Ubuntu 14.12-0ubuntu0.22.04.1)

-- NOTE(review): schema-only snapshot of the registry database, loaded into a
-- scratch Postgres container by prepare-sqlx.sh so `cargo sqlx prepare` can
-- type-check queries offline. Regenerate with `pg_dump --schema-only` after
-- any schema change; do not hand-edit object definitions.

SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_table_access_method = heap;

--
-- Name: admin; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.admin (
    id character varying NOT NULL,
    created_at timestamp with time zone NOT NULL,
    pass_hash character varying NOT NULL,
    deleted_at timestamp with time zone
);
ALTER TABLE public.admin OWNER TO alpha_admin;

--
-- Name: admin_pkgs; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.admin_pkgs (
    id bigint NOT NULL,
    admin character varying NOT NULL,
    pkg_id character varying NOT NULL
);
ALTER TABLE public.admin_pkgs OWNER TO alpha_admin;

--
-- Name: admin_pkgs_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.admin_pkgs_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.admin_pkgs_id_seq OWNER TO alpha_admin;

--
-- Name: admin_pkgs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.admin_pkgs_id_seq OWNED BY public.admin_pkgs.id;

--
-- Name: category; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.category (
    id bigint NOT NULL,
    created_at timestamp with time zone NOT NULL,
    name character varying NOT NULL,
    description character varying NOT NULL,
    priority bigint DEFAULT 0 NOT NULL
);
ALTER TABLE public.category OWNER TO alpha_admin;

--
-- Name: category_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.category_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.category_id_seq OWNER TO alpha_admin;

--
-- Name: category_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.category_id_seq OWNED BY public.category.id;

--
-- Name: eos_hash; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.eos_hash (
    id bigint NOT NULL,
    version character varying NOT NULL,
    hash character varying NOT NULL
);
ALTER TABLE public.eos_hash OWNER TO alpha_admin;

--
-- Name: eos_hash_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.eos_hash_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.eos_hash_id_seq OWNER TO alpha_admin;

--
-- Name: eos_hash_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.eos_hash_id_seq OWNED BY public.eos_hash.id;

--
-- Name: error_log_record; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.error_log_record (
    id bigint NOT NULL,
    created_at timestamp with time zone NOT NULL,
    epoch character varying NOT NULL,
    commit_hash character varying NOT NULL,
    source_file character varying NOT NULL,
    line bigint NOT NULL,
    target character varying NOT NULL,
    level character varying NOT NULL,
    message character varying NOT NULL,
    incidents bigint NOT NULL
);
ALTER TABLE public.error_log_record OWNER TO alpha_admin;

--
-- Name: error_log_record_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.error_log_record_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.error_log_record_id_seq OWNER TO alpha_admin;

--
-- Name: error_log_record_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.error_log_record_id_seq OWNED BY public.error_log_record.id;

--
-- Name: metric; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.metric (
    id bigint NOT NULL,
    created_at timestamp with time zone NOT NULL,
    version character varying NOT NULL,
    pkg_id character varying NOT NULL
);
ALTER TABLE public.metric OWNER TO alpha_admin;

--
-- Name: metric_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.metric_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.metric_id_seq OWNER TO alpha_admin;

--
-- Name: metric_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.metric_id_seq OWNED BY public.metric.id;

--
-- Name: os_version; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.os_version (
    id bigint NOT NULL,
    created_at timestamp with time zone NOT NULL,
    updated_at timestamp with time zone NOT NULL,
    number character varying NOT NULL,
    headline character varying NOT NULL,
    release_notes character varying NOT NULL,
    arch character varying
);
ALTER TABLE public.os_version OWNER TO alpha_admin;

--
-- Name: os_version_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.os_version_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.os_version_id_seq OWNER TO alpha_admin;

--
-- Name: os_version_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.os_version_id_seq OWNED BY public.os_version.id;

--
-- Name: persistent_migration; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.persistent_migration (
    id integer NOT NULL,
    version integer NOT NULL,
    label character varying,
    "timestamp" timestamp with time zone NOT NULL
);
ALTER TABLE public.persistent_migration OWNER TO alpha_admin;

--
-- Name: persistent_migration_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.persistent_migration_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.persistent_migration_id_seq OWNER TO alpha_admin;

--
-- Name: persistent_migration_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.persistent_migration_id_seq OWNED BY public.persistent_migration.id;

--
-- Name: pkg_category; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.pkg_category (
    id bigint NOT NULL,
    created_at timestamp with time zone NOT NULL,
    category_id bigint NOT NULL,
    pkg_id character varying NOT NULL
);
ALTER TABLE public.pkg_category OWNER TO alpha_admin;

--
-- Name: pkg_dependency; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.pkg_dependency (
    id bigint NOT NULL,
    created_at timestamp with time zone NOT NULL,
    pkg_id character varying NOT NULL,
    pkg_version character varying NOT NULL,
    dep_id character varying NOT NULL,
    dep_version_range character varying NOT NULL
);
ALTER TABLE public.pkg_dependency OWNER TO alpha_admin;

--
-- Name: pkg_dependency_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.pkg_dependency_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.pkg_dependency_id_seq OWNER TO alpha_admin;

--
-- Name: pkg_dependency_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.pkg_dependency_id_seq OWNED BY public.pkg_dependency.id;

--
-- Name: pkg_record; Type: TABLE; Schema: public; Owner: alpha_admin
--
-- NOTE(review): pkg_record has no PRIMARY KEY; pkg_id uniqueness is enforced
-- only by the unique index pkg_record_pkg_id_idx defined below, which the
-- pkg_id foreign keys elsewhere rely on.
CREATE TABLE public.pkg_record (
    created_at timestamp with time zone NOT NULL,
    updated_at timestamp with time zone,
    pkg_id character varying NOT NULL,
    hidden boolean DEFAULT false NOT NULL
);
ALTER TABLE public.pkg_record OWNER TO alpha_admin;

--
-- Name: service_category_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
-- NOTE(review): legacy name — this sequence backs pkg_category.id (see the
-- OWNED BY below and the SET DEFAULT for pkg_category further down).
CREATE SEQUENCE public.service_category_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.service_category_id_seq OWNER TO alpha_admin;

--
-- Name: service_category_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.service_category_id_seq OWNED BY public.pkg_category.id;

--
-- Name: upload; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.upload (
    id bigint NOT NULL,
    uploader character varying NOT NULL,
    pkg_id character varying NOT NULL,
    pkg_version character varying NOT NULL,
    created_at timestamp with time zone NOT NULL
);
ALTER TABLE public.upload OWNER TO alpha_admin;

--
-- Name: upload_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.upload_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.upload_id_seq OWNER TO alpha_admin;

--
-- Name: upload_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.upload_id_seq OWNED BY public.upload.id;

--
-- Name: user_activity; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.user_activity (
    id bigint NOT NULL,
    created_at timestamp with time zone NOT NULL,
    server_id character varying NOT NULL,
    os_version character varying,
    arch character varying
);
ALTER TABLE public.user_activity OWNER TO alpha_admin;

--
-- Name: user_activity_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin
--
CREATE SEQUENCE public.user_activity_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.user_activity_id_seq OWNER TO alpha_admin;

--
-- Name: user_activity_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin
--
ALTER SEQUENCE public.user_activity_id_seq OWNED BY public.user_activity.id;

--
-- Name: version; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.version (
    created_at timestamp with time zone NOT NULL,
    updated_at timestamp with time zone,
    number character varying NOT NULL,
    release_notes character varying NOT NULL,
    os_version character varying NOT NULL,
    pkg_id character varying NOT NULL,
    title character varying NOT NULL,
    desc_short character varying NOT NULL,
    desc_long character varying NOT NULL,
    icon_type character varying NOT NULL,
    deprecated_at timestamp with time zone
);
ALTER TABLE public.version OWNER TO alpha_admin;

--
-- Name: version_platform; Type: TABLE; Schema: public; Owner: alpha_admin
--
CREATE TABLE public.version_platform (
    created_at timestamp with time zone NOT NULL,
    updated_at timestamp with time zone,
    pkg_id character varying NOT NULL,
    version_number character varying NOT NULL,
    arch character varying NOT NULL,
    ram bigint,
    device jsonb
);
ALTER TABLE public.version_platform OWNER TO alpha_admin;

--
-- Name: admin_pkgs id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.admin_pkgs ALTER COLUMN id SET DEFAULT nextval('public.admin_pkgs_id_seq'::regclass);

--
-- Name: category id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.category ALTER COLUMN id SET DEFAULT nextval('public.category_id_seq'::regclass);

--
-- Name: eos_hash id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.eos_hash ALTER COLUMN id SET DEFAULT nextval('public.eos_hash_id_seq'::regclass);

--
-- Name: error_log_record id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.error_log_record ALTER COLUMN id SET DEFAULT nextval('public.error_log_record_id_seq'::regclass);

--
-- Name: metric id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.metric ALTER COLUMN id SET DEFAULT nextval('public.metric_id_seq'::regclass);

--
-- Name: os_version id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.os_version ALTER COLUMN id SET DEFAULT nextval('public.os_version_id_seq'::regclass);

--
-- Name: persistent_migration id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.persistent_migration ALTER COLUMN id SET DEFAULT nextval('public.persistent_migration_id_seq'::regclass);

--
-- Name: pkg_category id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_category ALTER COLUMN id SET DEFAULT nextval('public.service_category_id_seq'::regclass);

--
-- Name: pkg_dependency id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_dependency ALTER COLUMN id SET DEFAULT nextval('public.pkg_dependency_id_seq'::regclass);

--
-- Name: upload id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.upload ALTER COLUMN id SET DEFAULT nextval('public.upload_id_seq'::regclass);

--
-- Name: user_activity id; Type: DEFAULT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.user_activity ALTER COLUMN id SET DEFAULT nextval('public.user_activity_id_seq'::regclass);

--
-- Name: admin admin_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.admin
    ADD CONSTRAINT admin_pkey PRIMARY KEY (id);

--
-- Name: admin_pkgs admin_pkgs_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.admin_pkgs
    ADD CONSTRAINT admin_pkgs_pkey PRIMARY KEY (id);

--
-- Name: category category_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.category
    ADD CONSTRAINT category_pkey PRIMARY KEY (id);

--
-- Name: eos_hash eos_hash_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.eos_hash
    ADD CONSTRAINT eos_hash_pkey PRIMARY KEY (id);

--
-- Name: error_log_record error_log_record_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.error_log_record
    ADD CONSTRAINT error_log_record_pkey PRIMARY KEY (id);

--
-- Name: metric metric_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.metric
    ADD CONSTRAINT metric_pkey PRIMARY KEY (id);

--
-- Name: os_version os_version_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.os_version
    ADD CONSTRAINT os_version_pkey PRIMARY KEY (id);

--
-- Name: persistent_migration persistent_migration_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.persistent_migration
    ADD CONSTRAINT persistent_migration_pkey PRIMARY KEY (id);

--
-- Name: pkg_category pkg_category_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_category
    ADD CONSTRAINT pkg_category_pkey PRIMARY KEY (id);

--
-- Name: pkg_dependency pkg_dependency_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_dependency
    ADD CONSTRAINT pkg_dependency_pkey PRIMARY KEY (id);

--
-- Name: admin_pkgs unique_admin_pkg; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.admin_pkgs
    ADD CONSTRAINT unique_admin_pkg UNIQUE (pkg_id, admin);

--
-- Name: error_log_record unique_log_record; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.error_log_record
    ADD CONSTRAINT unique_log_record UNIQUE (epoch, commit_hash, source_file, line, target, level, message);

--
-- Name: category unique_name; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.category
    ADD CONSTRAINT unique_name UNIQUE (name);

--
-- Name: pkg_category unique_pkg_category; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_category
    ADD CONSTRAINT unique_pkg_category UNIQUE (pkg_id, category_id);

--
-- Name: pkg_dependency unique_pkg_dep_version; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_dependency
    ADD CONSTRAINT unique_pkg_dep_version UNIQUE (pkg_id, pkg_version, dep_id);

--
-- Name: eos_hash unique_version; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.eos_hash
    ADD CONSTRAINT unique_version UNIQUE (version);

--
-- Name: upload upload_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.upload
    ADD CONSTRAINT upload_pkey PRIMARY KEY (id);

--
-- Name: user_activity user_activity_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.user_activity
    ADD CONSTRAINT user_activity_pkey PRIMARY KEY (id);

--
-- Name: version version_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.version
    ADD CONSTRAINT version_pkey PRIMARY KEY (pkg_id, number);

--
-- Name: version_platform version_platform_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.version_platform
    ADD CONSTRAINT version_platform_pkey PRIMARY KEY (pkg_id, version_number, arch);

--
-- Name: category_name_idx; Type: INDEX; Schema: public; Owner: alpha_admin
--
CREATE UNIQUE INDEX category_name_idx ON public.category USING btree (name);

--
-- Name: pkg_record_pkg_id_idx; Type: INDEX; Schema: public; Owner: alpha_admin
--
CREATE UNIQUE INDEX pkg_record_pkg_id_idx ON public.pkg_record USING btree (pkg_id);

--
-- Name: version_number_idx; Type: INDEX; Schema: public; Owner: alpha_admin
--
CREATE INDEX version_number_idx ON public.version USING btree (number);

--
-- Name: admin_pkgs admin_pkgs_admin_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.admin_pkgs
    ADD CONSTRAINT admin_pkgs_admin_fkey FOREIGN KEY (admin) REFERENCES public.admin(id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: metric metric_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.metric
    ADD CONSTRAINT metric_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: pkg_category pkg_category_category_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_category
    ADD CONSTRAINT pkg_category_category_id_fkey FOREIGN KEY (category_id) REFERENCES public.category(id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: pkg_category pkg_category_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_category
    ADD CONSTRAINT pkg_category_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: pkg_dependency pkg_dependency_dep_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_dependency
    ADD CONSTRAINT pkg_dependency_dep_id_fkey FOREIGN KEY (dep_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: pkg_dependency pkg_dependency_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.pkg_dependency
    ADD CONSTRAINT pkg_dependency_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: upload upload_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.upload
    ADD CONSTRAINT upload_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: upload upload_uploader_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.upload
    ADD CONSTRAINT upload_uploader_fkey FOREIGN KEY (uploader) REFERENCES public.admin(id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: version version_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.version
    ADD CONSTRAINT version_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- Name: version_platform version_platform_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin
--
ALTER TABLE ONLY public.version_platform
    ADD CONSTRAINT version_platform_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT;

--
-- PostgreSQL database dump complete
--

View File

@@ -1,436 +0,0 @@
use clap::Parser;
use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::context::CliContext;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::util::serde::{HandlerExtSerde, WithIoFormat, display_serializable};
/// Router for the registry `metrics` subcommands (`summary`, `users`,
/// `downloads`). Every handler is marked with `"admin"` metadata, supports
/// serializable output formats, and falls back to a custom table display.
pub fn metrics_api<C: Context>() -> ParentHandler<C> {
    ParentHandler::new()
        .subcommand(
            "summary",
            from_fn_async(get_summary)
                .with_metadata("admin", Value::Bool(true))
                .with_display_serializable()
                .with_custom_display_fn(|handle, result| display_summary(handle.params, result))
                .with_about("about.get-metrics-summary")
                .with_call_remote::<CliContext>(),
        )
        .subcommand(
            "users",
            from_fn_async(get_users)
                .with_metadata("admin", Value::Bool(true))
                .with_display_serializable()
                .with_custom_display_fn(|handle, result| display_users(handle.params, result))
                .with_about("about.get-metrics-users")
                .with_call_remote::<CliContext>(),
        )
        .subcommand(
            "downloads",
            from_fn_async(get_downloads)
                .with_metadata("admin", Value::Bool(true))
                .with_display_serializable()
                .with_custom_display_fn(|handle, result| {
                    display_downloads(handle.params, result)
                })
                .with_about("about.get-metrics-downloads")
                .with_call_remote::<CliContext>(),
        )
}
// --- summary ---
/// One labeled tally row, e.g. an architecture name and its check-in count.
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CountEntry {
    /// Group label; SQL NULLs are surfaced as the literal string "unknown".
    pub label: String,
    /// Number of rows in this group.
    pub count: u64,
}

/// Registry-wide metrics snapshot returned by `metrics summary`.
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct MetricsSummary {
    /// Total rows in `user_activity` (every check-in recorded).
    pub total_checkins: u64,
    /// Distinct `server_id`s seen in `user_activity`.
    pub unique_servers: u64,
    /// Total rows in `package_request`.
    pub total_package_requests: u64,
    /// Check-ins grouped by architecture, descending by count.
    pub by_arch: Vec<CountEntry>,
    /// Check-ins grouped by OS version, descending by count.
    pub by_os_version: Vec<CountEntry>,
}
/// Compute a [`MetricsSummary`] from the metrics database.
///
/// The rusqlite queries are blocking, so the entire read runs inside
/// `spawn_blocking`; `peek` supplies the connection.
pub async fn get_summary(ctx: RegistryContext) -> Result<MetricsSummary, Error> {
    tokio::task::spawn_blocking(move || {
        ctx.metrics_db.peek(|conn| {
            let total_checkins: u64 = conn
                .query_row("SELECT COUNT(*) FROM user_activity", [], |row| row.get(0))
                .with_kind(ErrorKind::Database)?;
            let unique_servers: u64 = conn
                .query_row(
                    "SELECT COUNT(DISTINCT server_id) FROM user_activity",
                    [],
                    |row| row.get(0),
                )
                .with_kind(ErrorKind::Database)?;
            let total_package_requests: u64 = conn
                .query_row("SELECT COUNT(*) FROM package_request", [], |row| row.get(0))
                .with_kind(ErrorKind::Database)?;
            // Group-by tallies; COALESCE maps NULL columns to "unknown".
            let by_arch = query_count_entries(
                conn,
                "SELECT COALESCE(arch, 'unknown'), COUNT(*) FROM user_activity GROUP BY arch ORDER BY COUNT(*) DESC",
            )?;
            let by_os_version = query_count_entries(
                conn,
                "SELECT COALESCE(os_version, 'unknown'), COUNT(*) FROM user_activity GROUP BY os_version ORDER BY COUNT(*) DESC",
            )?;
            Ok(MetricsSummary {
                total_checkins,
                unique_servers,
                total_package_requests,
                by_arch,
                by_os_version,
            })
        })
    })
    .await
    .with_kind(ErrorKind::Unknown)?
}
/// Print a [`MetricsSummary`] for the CLI: structured output when a
/// `--format` was given, otherwise plain totals followed by per-arch and
/// per-OS-version tables (each table is omitted when its data is empty).
fn display_summary<T>(params: WithIoFormat<T>, summary: MetricsSummary) -> Result<(), Error> {
    use prettytable::*;
    if let Some(format) = params.format {
        return display_serializable(format, summary);
    }
    println!("Total check-ins: {}", summary.total_checkins);
    println!("Unique servers: {}", summary.unique_servers);
    println!("Total package requests: {}", summary.total_package_requests);
    if !summary.by_arch.is_empty() {
        println!();
        let mut table = Table::new();
        table.add_row(row![bc => "ARCHITECTURE", "COUNT"]);
        for entry in &summary.by_arch {
            table.add_row(row![&entry.label, entry.count]);
        }
        table.print_tty(false)?;
    }
    if !summary.by_os_version.is_empty() {
        println!();
        let mut table = Table::new();
        table.add_row(row![bc => "OS VERSION", "COUNT"]);
        for entry in &summary.by_os_version {
            table.add_row(row![&entry.label, entry.count]);
        }
        table.print_tty(false)?;
    }
    Ok(())
}
// --- users ---
// CLI/RPC parameters for `metrics users`: an optional half-open time range.
// (Plain `//` comments here: clap derives long help from `///` doc comments,
// so adding them would change the generated CLI help text.)
#[derive(Debug, Deserialize, Serialize, Parser, TS)]
#[group(skip)]
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GetUsersParams {
    /// Start of time range (RFC 3339)
    #[ts(type = "string | null")]
    #[arg(long, help = "help.arg.metrics-after")]
    pub after: Option<String>,
    /// End of time range (RFC 3339)
    #[ts(type = "string | null")]
    #[arg(long, help = "help.arg.metrics-before")]
    pub before: Option<String>,
}

/// User-activity counts for the requested time range.
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct UsersResponse {
    /// Distinct `server_id`s that checked in within the range.
    pub unique_servers: u64,
    /// Total check-in rows within the range.
    pub total_checkins: u64,
}
/// Count unique servers and total check-ins, optionally restricted to a
/// `[after, before)` window on `created_at` (see `time_range_where`).
pub async fn get_users(
    ctx: RegistryContext,
    GetUsersParams { after, before }: GetUsersParams,
) -> Result<UsersResponse, Error> {
    tokio::task::spawn_blocking(move || {
        ctx.metrics_db.peek(|conn| {
            // Same WHERE clause + positional params for both queries.
            let (where_clause, params) = time_range_where(&after, &before);
            let unique_servers: u64 = conn
                .query_row(
                    &format!("SELECT COUNT(DISTINCT server_id) FROM user_activity{where_clause}"),
                    rusqlite::params_from_iter(&params),
                    |row| row.get(0),
                )
                .with_kind(ErrorKind::Database)?;
            let total_checkins: u64 = conn
                .query_row(
                    &format!("SELECT COUNT(*) FROM user_activity{where_clause}"),
                    rusqlite::params_from_iter(&params),
                    |row| row.get(0),
                )
                .with_kind(ErrorKind::Database)?;
            Ok(UsersResponse {
                unique_servers,
                total_checkins,
            })
        })
    })
    .await
    .with_kind(ErrorKind::Unknown)?
}
/// Render a [`UsersResponse`] for the CLI: honor an explicit `--format`
/// via `display_serializable`, otherwise print a two-line plain summary.
fn display_users<T>(params: WithIoFormat<T>, response: UsersResponse) -> Result<(), Error> {
    match params.format {
        Some(fmt) => display_serializable(fmt, response),
        None => {
            println!("Unique servers: {}", response.unique_servers);
            println!("Total check-ins: {}", response.total_checkins);
            Ok(())
        }
    }
}
// --- downloads ---
// CLI/RPC parameters for `metrics downloads`: optional package/version
// filters plus an optional half-open time range; all filters AND together.
#[derive(Debug, Deserialize, Serialize, Parser, TS)]
#[group(skip)]
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GetDownloadsParams {
    /// Filter by package ID
    #[ts(type = "string | null")]
    #[arg(long, help = "help.arg.metrics-pkg-id")]
    pub pkg_id: Option<String>,
    /// Filter by version
    #[ts(type = "string | null")]
    #[arg(long, help = "help.arg.metrics-version")]
    pub version: Option<String>,
    /// Start of time range (RFC 3339)
    #[ts(type = "string | null")]
    #[arg(long, help = "help.arg.metrics-after")]
    pub after: Option<String>,
    /// End of time range (RFC 3339)
    #[ts(type = "string | null")]
    #[arg(long, help = "help.arg.metrics-before")]
    pub before: Option<String>,
}

/// Request count for one (package, version) pair.
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct PackageVersionCount {
    pub pkg_id: String,
    /// Version string; "unknown" when the request recorded no version.
    pub version: String,
    pub count: u64,
}

/// Package-request counts matching the requested filters.
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct DownloadsResponse {
    /// Total matching rows in `package_request`.
    pub total_requests: u64,
    /// Requests grouped by package, descending by count.
    pub by_package: Vec<CountEntry>,
    /// Requests grouped by (package, version), ordered by package then count.
    pub by_package_version: Vec<PackageVersionCount>,
}
/// Count package requests matching the given filters, with per-package and
/// per-(package, version) breakdowns. Filters combine with AND; see
/// `downloads_where` for clause/parameter construction.
pub async fn get_downloads(
    ctx: RegistryContext,
    GetDownloadsParams {
        pkg_id,
        version,
        after,
        before,
    }: GetDownloadsParams,
) -> Result<DownloadsResponse, Error> {
    tokio::task::spawn_blocking(move || {
        ctx.metrics_db.peek(|conn| {
            // One WHERE clause + param list shared by all three queries.
            let (where_clause, params) =
                downloads_where(&pkg_id, &version, &after, &before);
            let total_requests: u64 = conn
                .query_row(
                    &format!("SELECT COUNT(*) FROM package_request{where_clause}"),
                    rusqlite::params_from_iter(&params),
                    |row| row.get(0),
                )
                .with_kind(ErrorKind::Database)?;
            let by_package = query_count_entries_with_params(
                conn,
                &format!(
                    "SELECT pkg_id, COUNT(*) FROM package_request{where_clause} GROUP BY pkg_id ORDER BY COUNT(*) DESC"
                ),
                &params,
            )?;
            // Per-(package, version) breakdown; NULL versions become "unknown".
            let by_package_version = {
                let mut stmt = conn
                    .prepare(&format!(
                        "SELECT pkg_id, COALESCE(version, 'unknown'), COUNT(*) FROM package_request{where_clause} GROUP BY pkg_id, version ORDER BY pkg_id, COUNT(*) DESC"
                    ))
                    .with_kind(ErrorKind::Database)?;
                let rows = stmt
                    .query_map(rusqlite::params_from_iter(&params), |row| {
                        Ok(PackageVersionCount {
                            pkg_id: row.get(0)?,
                            version: row.get(1)?,
                            count: row.get(2)?,
                        })
                    })
                    .with_kind(ErrorKind::Database)?;
                rows.map(|r| r.with_kind(ErrorKind::Database))
                    .collect::<Result<Vec<_>, _>>()?
            };
            Ok(DownloadsResponse {
                total_requests,
                by_package,
                by_package_version,
            })
        })
    })
    .await
    .with_kind(ErrorKind::Unknown)?
}
/// Print a [`DownloadsResponse`] for the CLI: structured output when a
/// `--format` was given, otherwise a total followed by per-package and
/// per-(package, version) tables (each omitted when empty).
fn display_downloads(
    params: WithIoFormat<GetDownloadsParams>,
    response: DownloadsResponse,
) -> Result<(), Error> {
    use prettytable::*;
    if let Some(format) = params.format {
        return display_serializable(format, response);
    }
    println!("Total requests: {}", response.total_requests);
    if !response.by_package.is_empty() {
        println!();
        let mut table = Table::new();
        table.add_row(row![bc => "PACKAGE", "COUNT"]);
        for entry in &response.by_package {
            table.add_row(row![&entry.label, entry.count]);
        }
        table.print_tty(false)?;
    }
    if !response.by_package_version.is_empty() {
        println!();
        let mut table = Table::new();
        table.add_row(row![bc => "PACKAGE", "VERSION", "COUNT"]);
        for entry in &response.by_package_version {
            table.add_row(row![&entry.pkg_id, &entry.version, entry.count]);
        }
        table.print_tty(false)?;
    }
    Ok(())
}
// --- helpers ---
/// Run a two-column `(label, count)` query with no bound parameters.
fn query_count_entries(
    conn: &rusqlite::Connection,
    sql: &str,
) -> Result<Vec<CountEntry>, Error> {
    query_count_entries_with_params(conn, sql, &[])
}
/// Run a two-column `(label, count)` query with positional string
/// parameters and collect each row into a [`CountEntry`].
fn query_count_entries_with_params(
    conn: &rusqlite::Connection,
    sql: &str,
    params: &[String],
) -> Result<Vec<CountEntry>, Error> {
    let mut stmt = conn.prepare(sql).with_kind(ErrorKind::Database)?;
    let rows = stmt
        .query_map(rusqlite::params_from_iter(params), |row| {
            Ok(CountEntry {
                label: row.get(0)?,
                count: row.get(1)?,
            })
        })
        .with_kind(ErrorKind::Database)?;
    rows.map(|r| r.with_kind(ErrorKind::Database))
        .collect()
}
/// Build an optional SQL `WHERE` clause restricting `created_at` to the
/// half-open interval `[after, before)`, using 1-based positional
/// placeholders (`?1`, `?2`, ...).
///
/// Returns the clause (empty string when no bound is given, otherwise
/// prefixed with a leading space) together with the bound values in
/// placeholder order.
fn time_range_where(
    after: &Option<String>,
    before: &Option<String>,
) -> (String, Vec<String>) {
    let mut bound_values: Vec<String> = Vec::new();
    let mut predicates: Vec<String> = Vec::new();
    // Placeholder index is the value's 1-based position in `bound_values`.
    for (cmp, bound) in [(">=", after), ("<", before)] {
        if let Some(v) = bound {
            bound_values.push(v.clone());
            predicates.push(format!("created_at {cmp} ?{}", bound_values.len()));
        }
    }
    if predicates.is_empty() {
        (String::new(), bound_values)
    } else {
        (
            format!(" WHERE {}", predicates.join(" AND ")),
            bound_values,
        )
    }
}
/// Build an optional SQL `WHERE` clause for `package_request` queries from
/// up to four filters (package id, version, and a `[after, before)` window
/// on `created_at`), all combined with AND, using 1-based positional
/// placeholders.
///
/// Returns the clause (empty when no filter is set, otherwise prefixed with
/// a leading space) plus the bound values in placeholder order.
fn downloads_where(
    pkg_id: &Option<String>,
    version: &Option<String>,
    after: &Option<String>,
    before: &Option<String>,
) -> (String, Vec<String>) {
    let mut values: Vec<String> = Vec::new();
    let mut preds: Vec<String> = Vec::new();
    {
        // Appends "<column> <op> ?N" for every filter that is present; the
        // placeholder index is the value's 1-based position in `values`.
        let mut push_filter = |lhs: &str, opt: &Option<String>| {
            if let Some(v) = opt {
                values.push(v.clone());
                preds.push(format!("{lhs} ?{}", values.len()));
            }
        };
        push_filter("pkg_id =", pkg_id);
        push_filter("version =", version);
        push_filter("created_at >=", after);
        push_filter("created_at <", before);
    }
    if preds.is_empty() {
        (String::new(), values)
    } else {
        (format!(" WHERE {}", preds.join(" AND ")), values)
    }
}

View File

@@ -27,7 +27,6 @@ pub mod context;
pub mod db;
pub mod device_info;
pub mod info;
pub mod metrics;
mod migrations;
pub mod os;
pub mod package;
@@ -101,10 +100,6 @@ pub fn registry_api<C: Context>() -> ParentHandler<C> {
"db",
db::db_api::<C>().with_about("about.commands-registry-db"),
)
.subcommand(
"metrics",
metrics::metrics_api::<C>().with_about("about.commands-registry-metrics"),
)
}
pub fn registry_router(ctx: RegistryContext) -> Router {

View File

@@ -8,7 +8,6 @@ pub const SIG_CONTEXT: &str = "startos";
pub mod asset;
pub mod index;
pub mod promote;
pub mod version;
pub fn os_api<C: Context>() -> ParentHandler<C> {
@@ -29,10 +28,4 @@ pub fn os_api<C: Context>() -> ParentHandler<C> {
"version",
version::version_api::<C>().with_about("about.commands-add-remove-list-versions"),
)
.subcommand(
"promote",
from_fn_async(promote::cli_os_promote)
.no_display()
.with_about("about.promote-os-registry"),
)
}

View File

@@ -1,114 +0,0 @@
use clap::Parser;
use exver::Version;
use imbl_value::InternedString;
use serde::{Deserialize, Serialize};
use url::Url;
use crate::context::CliContext;
use crate::prelude::*;
use crate::registry::os::SIG_CONTEXT;
use crate::registry::os::index::OsIndex;
use crate::registry::package::promote::{call_registry, resolve_registry_url};
use crate::sign::commitment::blake3::Blake3Commitment;
use crate::sign::ed25519::Ed25519;
use crate::sign::{AnySignature, SignatureScheme};
#[derive(Debug, Deserialize, Serialize, Parser)]
#[group(skip)]
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
pub struct CliOsPromoteParams {
#[arg(long, help = "help.arg.from-registry-url")]
pub from: Option<Url>,
#[arg(long, help = "help.arg.to-registry-url")]
pub to: Option<Url>,
#[arg(help = "help.arg.os-version")]
pub version: Version,
}
/// CLI handler for promoting an OS release from one registry to another.
///
/// Fetches the OS index from the source registry, looks up `version`, then
/// re-publishes the version metadata and every asset (iso / squashfs / img)
/// to the destination registry, re-signing each asset commitment with the
/// local developer key.
///
/// At least one of `from` / `to` must be supplied explicitly; whichever is
/// omitted resolves to the registry configured on the CLI context (see
/// `resolve_registry_url`).
///
/// # Errors
/// - `InvalidRequest` if neither `from` nor `to` is given.
/// - `NotFound` if the requested version is absent from the source index.
/// - Any transport/signing error from the underlying registry calls.
pub async fn cli_os_promote(
    ctx: CliContext,
    CliOsPromoteParams { from, to, version }: CliOsPromoteParams,
) -> Result<(), Error> {
    // With both omitted, source and destination would be the same configured
    // registry — a no-op at best, so reject it up front.
    if from.is_none() && to.is_none() {
        return Err(Error::new(
            eyre!("{}", t!("registry.os.promote.need-from-or-to")),
            ErrorKind::InvalidRequest,
        ));
    }
    let from_url = resolve_registry_url(from.as_ref(), &ctx)?;
    let to_url = resolve_registry_url(to.as_ref(), &ctx)?;
    // Fetch OS index from source registry
    let res: Value = call_registry(&ctx, from_url, "os.index", imbl_value::json!({})).await?;
    let os_index: OsIndex = from_value(res)?;
    // Find the target version
    let version_info = os_index
        .versions
        .0
        .get(&version)
        .ok_or_else(|| {
            Error::new(
                eyre!(
                    "{}",
                    t!(
                        "registry.os.promote.version-not-found",
                        version = &version
                    )
                ),
                ErrorKind::NotFound,
            )
        })?;
    // Add the version to the target registry
    call_registry(
        &ctx,
        to_url.clone(),
        "os.version.add",
        imbl_value::json!({
            "version": &version,
            "headline": &version_info.headline,
            "releaseNotes": &version_info.release_notes,
            "sourceVersion": &version_info.source_version,
        }),
    )
    .await?;
    // Promote all assets for each type and platform
    promote_assets(&ctx, &to_url, &version, &version_info.iso, "os.asset.add.iso").await?;
    promote_assets(&ctx, &to_url, &version, &version_info.squashfs, "os.asset.add.squashfs").await?;
    promote_assets(&ctx, &to_url, &version, &version_info.img, "os.asset.add.img").await?;
    Ok(())
}
async fn promote_assets(
ctx: &CliContext,
to_url: &Url,
version: &Version,
assets: &std::collections::BTreeMap<InternedString, crate::registry::asset::RegistryAsset<Blake3Commitment>>,
method: &str,
) -> Result<(), Error> {
for (platform, asset) in assets {
let commitment = &asset.commitment;
let signature =
AnySignature::Ed25519(Ed25519.sign_commitment(ctx.developer_key()?, commitment, SIG_CONTEXT)?);
call_registry(
ctx,
to_url.clone(),
method,
imbl_value::json!({
"version": version,
"platform": platform,
"url": &asset.urls[0],
"signature": signature,
"commitment": commitment,
}),
)
.await?;
}
Ok(())
}

View File

@@ -1,14 +1,12 @@
use std::collections::BTreeMap;
use chrono::Utc;
use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};
use clap::Parser;
use exver::{Version, VersionRange};
use imbl_value::InternedString;
use itertools::Itertools;
use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async};
use rusqlite::params;
use serde::{Deserialize, Serialize};
use tracing::warn;
use ts_rs::TS;
use crate::context::CliContext;
@@ -161,6 +159,33 @@ pub struct GetOsVersionParams {
pub device_info: Option<DeviceInfo>,
}
/// Newtype wrapper letting a `chrono::DateTime<Utc>` be bound to a Postgres
/// `TIMESTAMPTZ` column via sqlx without enabling sqlx's chrono feature.
struct PgDateTime(DateTime<Utc>);
impl sqlx::Type<sqlx::Postgres> for PgDateTime {
    fn type_info() -> <sqlx::Postgres as sqlx::Database>::TypeInfo {
        // 1184 is the built-in Postgres OID for TIMESTAMPTZ.
        sqlx::postgres::PgTypeInfo::with_oid(sqlx::postgres::types::Oid(1184))
    }
}
impl sqlx::Encode<'_, sqlx::Postgres> for PgDateTime {
    fn encode_by_ref(
        &self,
        buf: &mut <sqlx::Postgres as sqlx::Database>::ArgumentBuffer<'_>,
    ) -> Result<sqlx::encode::IsNull, sqlx::error::BoxDynError> {
        // Postgres's binary timestamp wire format counts microseconds from
        // its own epoch, 2000-01-01T00:00:00 (not the Unix epoch).
        fn postgres_epoch_datetime() -> NaiveDateTime {
            NaiveDate::from_ymd_opt(2000, 1, 1)
                .expect("expected 2000-01-01 to be a valid NaiveDate")
                .and_hms_opt(0, 0, 0)
                .expect("expected 2000-01-01T00:00:00 to be a valid NaiveDateTime")
        }
        // num_microseconds() returns None on i64 overflow (~±292k years);
        // surface that as an encode error rather than wrapping silently.
        let micros = (self.0.naive_utc() - postgres_epoch_datetime())
            .num_microseconds()
            .ok_or_else(|| format!("NaiveDateTime out of range for Postgres: {:?}", self.0))?;
        micros.encode(buf)
    }
    fn size_hint(&self) -> usize {
        // Encoded value is always a single 8-byte integer.
        std::mem::size_of::<i64>()
    }
}
pub async fn get_version(
ctx: RegistryContext,
GetOsVersionParams {
@@ -174,28 +199,16 @@ pub async fn get_version(
{
let source = source.or_else(|| device_info.as_ref().map(|d| d.os.version.clone()));
let platform = platform.or_else(|| device_info.as_ref().map(|d| d.os.platform.clone()));
if let (Some(server_id), Some(arch)) = (server_id, &platform) {
const MAX_SERVER_ID_LEN: usize = 256;
if server_id.len() <= MAX_SERVER_ID_LEN {
let created_at = Utc::now().to_rfc3339();
let arch = arch.to_string();
let os_version = source.as_ref().map(|v| v.to_string());
let ctx = ctx.clone();
tokio::task::spawn_blocking(move || {
ctx.metrics_db.mutate(|conn| {
if let Err(e) = conn.execute(
concat!(
"INSERT INTO user_activity ",
"(created_at, server_id, arch, os_version) ",
"VALUES (?1, ?2, ?3, ?4)"
),
params![created_at, server_id, arch, os_version],
) {
warn!("failed to record user activity metric: {e}");
}
});
});
}
if let (Some(pool), Some(server_id), Some(arch)) = (&ctx.pool, server_id, &platform) {
let created_at = Utc::now();
sqlx::query("INSERT INTO user_activity (created_at, server_id, arch) VALUES ($1, $2, $3)")
.bind(PgDateTime(created_at))
.bind(server_id)
.bind(&**arch)
.execute(pool)
.await
.with_kind(ErrorKind::Database)?;
}
let target = target.unwrap_or(VersionRange::Any);
let mut res = to_value::<BTreeMap<Version, OsVersionInfo>>(

View File

@@ -8,10 +8,6 @@ use itertools::Itertools;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use chrono::Utc;
use rusqlite::params;
use tracing::warn;
use crate::PackageId;
use crate::context::CliContext;
use crate::prelude::*;
@@ -237,25 +233,6 @@ pub async fn get_package(ctx: RegistryContext, params: GetPackageParams) -> Resu
}
}
if let Some(id) = &params.id {
if params.target_version.is_some() {
let created_at = Utc::now().to_rfc3339();
let pkg_id = id.to_string();
let version = best
.get(id)
.and_then(|b| b.keys().last())
.map(|v| v.to_string());
let ctx = ctx.clone();
tokio::task::spawn_blocking(move || {
ctx.metrics_db.mutate(|conn| {
if let Err(e) = conn.execute(
"INSERT INTO package_request (created_at, pkg_id, version) VALUES (?1, ?2, ?3)",
params![created_at, pkg_id, version],
) {
warn!("failed to record package request metric: {e}");
}
});
});
}
let categories = peek
.as_index()
.as_package()

View File

@@ -8,7 +8,6 @@ pub mod add;
pub mod category;
pub mod get;
pub mod index;
pub mod promote;
pub mod signer;
pub fn package_api<C: Context>() -> ParentHandler<C> {
@@ -99,12 +98,6 @@ pub fn package_api<C: Context>() -> ParentHandler<C> {
.no_display()
.with_about("about.download-s9pk"),
)
.subcommand(
"promote",
from_fn_async(promote::cli_promote)
.no_display()
.with_about("about.promote-package-registry"),
)
.subcommand(
"category",
category::category_api::<C>().with_about("about.update-categories-registry"),

View File

@@ -1,144 +0,0 @@
use clap::Parser;
use http::HeaderMap;
use imbl_value::InternedString;
use serde::{Deserialize, Serialize};
use url::Url;
use crate::PackageId;
use crate::context::CliContext;
use crate::prelude::*;
use crate::registry::package::get::{GetPackageResponseFull, PackageDetailLevel};
use crate::s9pk::v2::SIG_CONTEXT;
use crate::sign::ed25519::Ed25519;
use crate::sign::{AnySignature, SignatureScheme};
use crate::util::VersionString;
#[derive(Debug, Deserialize, Serialize, Parser)]
#[group(skip)]
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
pub struct CliPromoteParams {
#[arg(long, help = "help.arg.from-registry-url")]
pub from: Option<Url>,
#[arg(long, help = "help.arg.to-registry-url")]
pub to: Option<Url>,
#[arg(help = "help.arg.package-id")]
pub id: PackageId,
#[arg(help = "help.arg.package-version")]
pub version: VersionString,
}
/// Derive the JSON-RPC endpoint for a registry base URL by appending the
/// `rpc/v0` path segments. Fails with `ParseUrl` for cannot-be-a-base URLs
/// (e.g. `mailto:`), which have no path segments to extend.
pub fn registry_rpc_url(url: &Url) -> Result<Url, Error> {
    let mut rpc = url.clone();
    {
        let mut segments = rpc
            .path_segments_mut()
            .map_err(|_| eyre!("Url cannot be base"))
            .with_kind(ErrorKind::ParseUrl)?;
        segments.push("rpc");
        segments.push("v0");
    }
    Ok(rpc)
}
pub fn resolve_registry_url(explicit: Option<&Url>, ctx: &CliContext) -> Result<Url, Error> {
if let Some(url) = explicit {
registry_rpc_url(url)
} else if let Some(url) = &ctx.registry_url {
Ok(url.clone())
} else {
Err(Error::new(
eyre!("{}", t!("registry.context.registry-required")),
ErrorKind::InvalidRequest,
))
}
}
/// Make a signed JSON-RPC call against a registry endpoint.
///
/// The signature context is derived from the target URL's host (when
/// present), so the remote can verify the request was intended for it.
/// Thin wrapper over `middleware::auth::signature::call_remote` that
/// normalizes its error type into this crate's `Error`.
pub async fn call_registry(
    ctx: &CliContext,
    url: Url,
    method: &str,
    params: Value,
) -> Result<Value, Error> {
    // NOTE: the host borrow must end before `url` is moved into call_remote;
    // `InternedString::from_display` produces an owned string, so it does.
    let sig_context = url.host().as_ref().map(InternedString::from_display);
    crate::middleware::auth::signature::call_remote(
        ctx,
        url,
        HeaderMap::new(),
        sig_context.as_deref(),
        method,
        params,
    )
    .await
    .map_err(Error::from)
}
/// CLI handler for promoting a package release from one registry to another.
///
/// Fetches full package info for `id` from the source registry, locates
/// `version` (checking the "best" set first, then other versions), then for
/// each s9pk variant of that version re-signs its commitment with the local
/// developer key and submits it to the destination registry via
/// `package.add`, referencing the source registry's existing URLs (the
/// binary itself is not re-uploaded).
///
/// At least one of `from` / `to` must be supplied; whichever is omitted
/// resolves to the registry configured on the CLI context.
///
/// # Errors
/// - `InvalidRequest` if neither `from` nor `to` is given.
/// - `NotFound` if the package version is absent from the source registry.
/// - Any signing or registry-transport error.
pub async fn cli_promote(
    ctx: CliContext,
    CliPromoteParams {
        from,
        to,
        id,
        version,
    }: CliPromoteParams,
) -> Result<(), Error> {
    // With both omitted, source and destination would be the same configured
    // registry — reject the no-op up front.
    if from.is_none() && to.is_none() {
        return Err(Error::new(
            eyre!("{}", t!("registry.package.promote.need-from-or-to")),
            ErrorKind::InvalidRequest,
        ));
    }
    let from_url = resolve_registry_url(from.as_ref(), &ctx)?;
    let to_url = resolve_registry_url(to.as_ref(), &ctx)?;
    // Fetch package info from source registry
    let res: Value = call_registry(
        &ctx,
        from_url,
        "package.get",
        imbl_value::json!({
            "id": &id,
            "otherVersions": PackageDetailLevel::Full,
        }),
    )
    .await?;
    let response: GetPackageResponseFull = from_value(res)?;
    // Find the target version
    let version_info = response
        .best
        .get(&version)
        .or_else(|| response.other_versions.get(&version))
        .ok_or_else(|| {
            Error::new(
                eyre!(
                    "{}",
                    t!(
                        "registry.package.promote.version-not-found",
                        id = &id,
                        version = &version
                    )
                ),
                ErrorKind::NotFound,
            )
        })?;
    // Promote each s9pk variant to the target registry
    for (_, asset) in &version_info.s9pks {
        let commitment = &asset.commitment;
        let signature = Ed25519.sign_commitment(ctx.developer_key()?, commitment, SIG_CONTEXT)?;
        call_registry(
            &ctx,
            to_url.clone(),
            "package.add",
            imbl_value::json!({
                "urls": &asset.urls,
                "signature": AnySignature::Ed25519(signature),
                "commitment": commitment,
            }),
        )
        .await?;
    }
    Ok(())
}

View File

@@ -194,12 +194,6 @@ impl TryFrom<ManifestV1> for Manifest {
if &*value.id == "nostr" {
value.id = "nostr-rs-relay".parse()?;
}
if &*value.id == "ghost" {
value.id = "ghost-legacy".parse()?;
}
if &*value.id == "synapse" {
value.id = "synapse-legacy".parse()?;
}
Ok(Self {
id: value.id,
version: version.into(),

View File

@@ -134,9 +134,7 @@ pub async fn list_service_interfaces(
.expect("valid json pointer");
let mut watch = context.seed.ctx.db.watch(ptr).await;
let Some(res) = from_value(watch.peek_and_mark_seen()?)? else {
return Ok(BTreeMap::new());
};
let res = from_value(watch.peek_and_mark_seen()?)?;
if let Some(callback) = callback {
let callback = callback.register(&context.seed.persistent_container);

View File

@@ -63,9 +63,8 @@ mod v0_4_0_alpha_20;
mod v0_4_0_alpha_21;
mod v0_4_0_alpha_22;
mod v0_4_0_alpha_23;
mod v0_4_0_beta_0;
pub type Current = v0_4_0_beta_0::Version; // VERSION_BUMP
pub type Current = v0_4_0_alpha_23::Version; // VERSION_BUMP
impl Current {
#[instrument(skip(self, db))]
@@ -196,8 +195,7 @@ enum Version {
V0_4_0_alpha_20(Wrapper<v0_4_0_alpha_20::Version>),
V0_4_0_alpha_21(Wrapper<v0_4_0_alpha_21::Version>),
V0_4_0_alpha_22(Wrapper<v0_4_0_alpha_22::Version>),
V0_4_0_alpha_23(Wrapper<v0_4_0_alpha_23::Version>),
V0_4_0_beta_0(Wrapper<v0_4_0_beta_0::Version>), // VERSION_BUMP
V0_4_0_alpha_23(Wrapper<v0_4_0_alpha_23::Version>), // VERSION_BUMP
Other(exver::Version),
}
@@ -263,8 +261,7 @@ impl Version {
Self::V0_4_0_alpha_20(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_21(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_22(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_23(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_beta_0(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP
Self::V0_4_0_alpha_23(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP
Self::Other(v) => {
return Err(Error::new(
eyre!("unknown version {v}"),
@@ -322,8 +319,7 @@ impl Version {
Version::V0_4_0_alpha_20(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_21(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_22(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_23(Wrapper(x)) => x.semver(),
Version::V0_4_0_beta_0(Wrapper(x)) => x.semver(), // VERSION_BUMP
Version::V0_4_0_alpha_23(Wrapper(x)) => x.semver(), // VERSION_BUMP
Version::Other(x) => x.clone(),
}
}

View File

@@ -40,19 +40,99 @@ lazy_static::lazy_static! {
);
}
/// All pre-0.4.0 StartOS images were initialized with the en_GB.UTF-8 locale.
/// The current trixie image does not ship it. Without it PostgreSQL starts
/// but refuses connections, breaking the migration.
async fn ensure_en_gb_locale() -> Result<(), Error> {
Command::new("localedef")
.arg("-i")
.arg("en_GB")
.arg("-c")
.arg("-f")
.arg("UTF-8")
.arg("en_GB.UTF-8")
.invoke(crate::ErrorKind::Database)
.await?;
/// Detect the LC_COLLATE / LC_CTYPE the cluster was created with and generate
/// those locales if they are missing from the running system. Older installs
/// may have been initialized with a locale (e.g. en_GB.UTF-8) that the current
/// image does not ship. Without it PostgreSQL starts but refuses
/// connections, breaking the migration.
///
/// Best-effort: if `pg_controldata` itself fails, the check is skipped with a
/// warning rather than aborting the migration.
///
/// # Arguments
/// * `pg_version` - major PostgreSQL version, used to locate both the cluster
///   data directory and the matching `pg_controldata` binary.
async fn ensure_cluster_locale(pg_version: u32) -> Result<(), Error> {
    let cluster_dir = format!("/var/lib/postgresql/{pg_version}/main");
    let pg_controldata = format!("/usr/lib/postgresql/{pg_version}/bin/pg_controldata");
    // pg_controldata reads the cluster's control file directly, so this works
    // without the server running.
    let output = Command::new(&pg_controldata)
        .arg(&cluster_dir)
        .kill_on_drop(true)
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .output()
        .await
        .with_kind(crate::ErrorKind::Database)?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        tracing::warn!("pg_controldata failed, skipping locale check: {stderr}");
        return Ok(());
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    let mut locales_needed = Vec::new();
    // Pull the cluster's collation and ctype locales out of the
    // "LC_COLLATE: ..." / "LC_CTYPE: ..." lines of pg_controldata output.
    for line in stdout.lines() {
        let locale = if let Some(rest) = line.strip_prefix("LC_COLLATE:") {
            rest.trim()
        } else if let Some(rest) = line.strip_prefix("LC_CTYPE:") {
            rest.trim()
        } else {
            continue;
        };
        // C and POSIX are always built in; nothing to generate for them.
        if !locale.is_empty() && locale != "C" && locale != "POSIX" {
            locales_needed.push(locale.to_owned());
        }
    }
    locales_needed.sort();
    locales_needed.dedup();
    if locales_needed.is_empty() {
        return Ok(());
    }
    // Check which locales are already available.
    let available = Command::new("locale")
        .arg("-a")
        .kill_on_drop(true)
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::null())
        .output()
        .await
        .map(|o| String::from_utf8_lossy(&o.stdout).to_string())
        .unwrap_or_default();
    let mut need_gen = false;
    for locale in &locales_needed {
        // locale -a normalizes e.g. "en_GB.UTF-8" → "en_GB.utf8"
        let normalized = locale.replace("-", "").to_lowercase();
        if available.lines().any(|l| l.replace("-", "").to_lowercase() == normalized) {
            continue;
        }
        // Debian's locale-gen ignores positional args — the locale must be
        // uncommented in /etc/locale.gen or appended to it.
        tracing::info!("Enabling missing locale for PostgreSQL cluster: {locale}");
        let locale_gen_path = Path::new("/etc/locale.gen");
        // Missing/unreadable /etc/locale.gen is treated as empty so the
        // entry gets appended below.
        let contents = tokio::fs::read_to_string(locale_gen_path)
            .await
            .unwrap_or_default();
        // Try to uncomment an existing entry first, otherwise append.
        // NOTE(review): assumes the charset column is always "UTF-8" —
        // matches the locales this is expected to encounter, but confirm.
        let entry = format!("{locale} UTF-8");
        let commented = format!("# {entry}");
        if contents.contains(&commented) {
            let updated = contents.replace(&commented, &entry);
            tokio::fs::write(locale_gen_path, updated).await?;
        } else if !contents.contains(&entry) {
            use tokio::io::AsyncWriteExt;
            let mut f = tokio::fs::OpenOptions::new()
                .create(true)
                .append(true)
                .open(locale_gen_path)
                .await?;
            f.write_all(format!("\n{entry}\n").as_bytes()).await?;
        }
        need_gen = true;
    }
    // Run locale-gen once at the end, after all edits, rather than per locale.
    if need_gen {
        Command::new("locale-gen")
            .invoke(crate::ErrorKind::Database)
            .await?;
    }
    Ok(())
}
@@ -111,7 +191,7 @@ async fn init_postgres(datadir: impl AsRef<Path>) -> Result<PgPool, Error> {
// current image (e.g. en_GB.UTF-8 on a server that predates the trixie
// image). Detect and generate it before starting PostgreSQL, otherwise
// PG will start but refuse connections.
ensure_en_gb_locale().await?;
ensure_cluster_locale(pg_version).await?;
Command::new("systemctl")
.arg("start")
@@ -348,28 +428,6 @@ impl VersionT for Version {
.await?;
}
if tokio::fs::metadata("/media/startos/data/package-data/volumes/ghost")
.await
.is_ok()
{
tokio::fs::rename(
"/media/startos/data/package-data/volumes/ghost",
"/media/startos/data/package-data/volumes/ghost-legacy",
)
.await?;
}
if tokio::fs::metadata("/media/startos/data/package-data/volumes/synapse")
.await
.is_ok()
{
tokio::fs::rename(
"/media/startos/data/package-data/volumes/synapse",
"/media/startos/data/package-data/volumes/synapse-legacy",
)
.await?;
}
// Load bundled migration images (start9/compat, start9/utils,
// tonistiigi/binfmt) so the v1->v2 s9pk conversion doesn't need
// internet access.
@@ -400,10 +458,7 @@ impl VersionT for Version {
loop {
interval.tick().await;
if let Some(ref id) = *current_package.borrow() {
tracing::info!(
"{}",
t!("migration.migrating-package", package = id.to_string())
);
tracing::info!("{}", t!("migration.migrating-package", package = id.to_string()));
}
}
})
@@ -448,10 +503,7 @@ impl VersionT for Version {
false
};
tracing::info!(
"{}",
t!("migration.migrating-package", package = id.to_string())
);
tracing::info!("{}", t!("migration.migrating-package", package = id.to_string()));
current_package.send_replace(Some(id.clone()));
if let Err(e) = async {

View File

@@ -1,37 +0,0 @@
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{VersionT, v0_4_0_alpha_23};
use crate::prelude::*;
lazy_static::lazy_static! {
    // The exver version string this migration step represents: 0.4.0-beta.0.
    static ref V0_4_0_beta_0: exver::Version = exver::Version::new(
        [0, 4, 0],
        [PreReleaseSegment::String("beta".into()), 0.into()]
    );
}
/// Migration marker for StartOS 0.4.0-beta.0. A pure version bump: `up` and
/// `down` make no database changes; it exists to chain the version sequence
/// forward from 0.4.0-alpha.23.
#[derive(Clone, Copy, Debug, Default)]
pub struct Version;
impl VersionT for Version {
    type Previous = v0_4_0_alpha_23::Version;
    type PreUpRes = ();
    // No pre-migration work is needed for this release.
    async fn pre_up(self) -> Result<Self::PreUpRes, Error> {
        Ok(())
    }
    fn semver(self) -> exver::Version {
        V0_4_0_beta_0.clone()
    }
    fn compat(self) -> &'static VersionRange {
        // Still compatible back to the shared 0.3.0 compatibility range.
        &V0_3_0_COMPAT
    }
    #[instrument(skip_all)]
    // Identity migration: returning Value::Null signals no db transform here
    // — presumably the framework treats it as "no changes"; confirm against
    // VersionT's contract.
    fn up(self, _db: &mut Value, _: Self::PreUpRes) -> Result<Value, Error> {
        Ok(Value::Null)
    }
    fn down(self, _db: &mut Value) -> Result<(), Error> {
        Ok(())
    }
}

View File

@@ -1,3 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type CountEntry = { label: string; count: bigint }

View File

@@ -1,9 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { CountEntry } from './CountEntry'
import type { PackageVersionCount } from './PackageVersionCount'
export type DownloadsResponse = {
totalRequests: bigint
byPackage: Array<CountEntry>
byPackageVersion: Array<PackageVersionCount>
}

View File

@@ -1,20 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type GetDownloadsParams = {
/**
* Filter by package ID
*/
pkgId: string | null
/**
* Filter by version
*/
version: string | null
/**
* Start of time range (RFC 3339)
*/
after: string | null
/**
* End of time range (RFC 3339)
*/
before: string | null
}

View File

@@ -1,12 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type GetUsersParams = {
/**
* Start of time range (RFC 3339)
*/
after: string | null
/**
* End of time range (RFC 3339)
*/
before: string | null
}

View File

@@ -1,10 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { CountEntry } from './CountEntry'
export type MetricsSummary = {
totalCheckins: bigint
uniqueServers: bigint
totalPackageRequests: bigint
byArch: Array<CountEntry>
byOsVersion: Array<CountEntry>
}

View File

@@ -1,7 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type PackageVersionCount = {
pkgId: string
version: string
count: bigint
}

View File

@@ -7,6 +7,8 @@ import type { ServerStatus } from './ServerStatus'
import type { SmtpValue } from './SmtpValue'
export type ServerInfo = {
arch: string
platform: string
id: string
name: string
hostname: string

View File

@@ -1,3 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type UsersResponse = { uniqueServers: bigint; totalCheckins: bigint }

View File

@@ -74,7 +74,6 @@ export { ClearTasksParams } from './ClearTasksParams'
export { CliSetIconParams } from './CliSetIconParams'
export { ContactInfo } from './ContactInfo'
export { ControlParams } from './ControlParams'
export { CountEntry } from './CountEntry'
export { CreateSubcontainerFsParams } from './CreateSubcontainerFsParams'
export { CreateTaskParams } from './CreateTaskParams'
export { CurrentDependencies } from './CurrentDependencies'
@@ -91,7 +90,6 @@ export { DestroySubcontainerFsParams } from './DestroySubcontainerFsParams'
export { DeviceFilter } from './DeviceFilter'
export { DnsSettings } from './DnsSettings'
export { DomainSettings } from './DomainSettings'
export { DownloadsResponse } from './DownloadsResponse'
export { Duration } from './Duration'
export { EchoParams } from './EchoParams'
export { EditSignerParams } from './EditSignerParams'
@@ -112,7 +110,6 @@ export { GenerateCertificateParams } from './GenerateCertificateParams'
export { GenerateCertificateResponse } from './GenerateCertificateResponse'
export { GetActionInputParams } from './GetActionInputParams'
export { GetContainerIpParams } from './GetContainerIpParams'
export { GetDownloadsParams } from './GetDownloadsParams'
export { GetHostInfoParams } from './GetHostInfoParams'
export { GetOsAssetParams } from './GetOsAssetParams'
export { GetOsVersionParams } from './GetOsVersionParams'
@@ -127,7 +124,6 @@ export { GetSslCertificateParams } from './GetSslCertificateParams'
export { GetSslKeyParams } from './GetSslKeyParams'
export { GetStatusParams } from './GetStatusParams'
export { GetSystemSmtpParams } from './GetSystemSmtpParams'
export { GetUsersParams } from './GetUsersParams'
export { GigaBytes } from './GigaBytes'
export { GitHash } from './GitHash'
export { Governor } from './Governor'
@@ -179,7 +175,6 @@ export { MetricsDisk } from './MetricsDisk'
export { MetricsFollowResponse } from './MetricsFollowResponse'
export { MetricsGeneral } from './MetricsGeneral'
export { MetricsMemory } from './MetricsMemory'
export { MetricsSummary } from './MetricsSummary'
export { Metrics } from './Metrics'
export { ModifyNotificationBeforeParams } from './ModifyNotificationBeforeParams'
export { ModifyNotificationParams } from './ModifyNotificationParams'
@@ -207,7 +202,6 @@ export { PackageInfoShort } from './PackageInfoShort'
export { PackageInfo } from './PackageInfo'
export { PackagePlugin } from './PackagePlugin'
export { PackageState } from './PackageState'
export { PackageVersionCount } from './PackageVersionCount'
export { PackageVersionInfo } from './PackageVersionInfo'
export { PartitionInfo } from './PartitionInfo'
export { PassthroughInfo } from './PassthroughInfo'
@@ -306,7 +300,6 @@ export { UrlPluginClearUrlsParams } from './UrlPluginClearUrlsParams'
export { UrlPluginExportUrlParams } from './UrlPluginExportUrlParams'
export { UrlPluginRegisterParams } from './UrlPluginRegisterParams'
export { UrlPluginRegistration } from './UrlPluginRegistration'
export { UsersResponse } from './UsersResponse'
export { VerifyCifsParams } from './VerifyCifsParams'
export { VersionSignerParams } from './VersionSignerParams'
export { Version } from './Version'

View File

@@ -70,7 +70,7 @@ import { createVolumes } from './util/Volume'
import { getDataVersion, setDataVersion } from './version'
/** The minimum StartOS version required by this SDK release */
export const OSVersion = testTypeVersion('0.4.0-beta.0')
export const OSVersion = testTypeVersion('0.4.0-alpha.23')
// prettier-ignore
type AnyNeverCond<T extends any[], Then, Else> =

View File

@@ -1,12 +1,12 @@
{
"name": "@start9labs/start-sdk",
"version": "1.0.0",
"version": "0.4.0-beta.66",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@start9labs/start-sdk",
"version": "1.0.0",
"version": "0.4.0-beta.66",
"license": "MIT",
"dependencies": {
"@iarna/toml": "^3.0.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@start9labs/start-sdk",
"version": "1.0.0",
"version": "0.4.0-beta.66",
"description": "Software development kit to facilitate packaging services for StartOS",
"main": "./package/lib/index.js",
"types": "./package/lib/index.d.ts",

4
web/package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "startos-ui",
"version": "0.4.0-beta.0",
"version": "0.4.0-alpha.23",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "startos-ui",
"version": "0.4.0-beta.0",
"version": "0.4.0-alpha.23",
"license": "MIT",
"dependencies": {
"@angular/cdk": "^21.2.1",

View File

@@ -1,6 +1,6 @@
{
"name": "startos-ui",
"version": "0.4.0-beta.0",
"version": "0.4.0-alpha.23",
"author": "Start9 Labs, Inc",
"homepage": "https://start9.com/",
"license": "MIT",

View File

@@ -14,6 +14,7 @@ export const mockPatchData: DataModel = {
snakeHighScore: 0,
},
serverInfo: {
arch: 'x86_64',
id: 'abcdefgh',
version,
lastBackup: new Date(new Date().valueOf() - 604800001).toISOString(),
@@ -239,6 +240,7 @@ export const mockPatchData: DataModel = {
ntpSynced: false,
smtp: null,
echoipUrls: ['https://ipconfig.me', 'https://ifconfig.co'],
platform: 'x86_64-nonfree',
zram: true,
governor: 'performance',
ram: 8 * 1024 * 1024 * 1024,