mirror of
https://github.com/Start9Labs/start-os.git
synced 2026-03-26 02:11:53 +00:00
feat: OTA updates for start-tunnel via apt repository (untested)
- Add apt repo publish script (build/apt/publish-deb.sh) for S3-hosted repo
- Add apt source config and GPG key placeholder (apt/)
- Add tunnel.update.check and tunnel.update.apply RPC endpoints
- Wire up update API in tunnel frontend (api service + mock)
- Use systemd-run --scope to survive service restart during update
This commit is contained in:
5
Makefile
5
Makefile
@@ -139,6 +139,11 @@ install-tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox
|
||||
$(call mkdir,$(DESTDIR)/usr/lib/startos/scripts)
|
||||
$(call cp,build/lib/scripts/forward-port,$(DESTDIR)/usr/lib/startos/scripts/forward-port)
|
||||
|
||||
$(call mkdir,$(DESTDIR)/etc/apt/sources.list.d)
|
||||
$(call cp,apt/start9.list,$(DESTDIR)/etc/apt/sources.list.d/start9.list)
|
||||
$(call mkdir,$(DESTDIR)/usr/share/keyrings)
|
||||
$(call cp,apt/start9.gpg,$(DESTDIR)/usr/share/keyrings/start9.gpg)
|
||||
|
||||
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox: $(CORE_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) web/dist/static/start-tunnel/index.html
|
||||
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-tunnelbox.sh
|
||||
|
||||
|
||||
0
apt/start9.gpg
Normal file
0
apt/start9.gpg
Normal file
1
apt/start9.list
Normal file
1
apt/start9.list
Normal file
@@ -0,0 +1 @@
|
||||
deb [arch=amd64,arm64,riscv64 signed-by=/usr/share/keyrings/start9.gpg] https://start9-debs.nyc3.cdn.digitaloceanspaces.com stable main
|
||||
130
build/apt/publish-deb.sh
Executable file
130
build/apt/publish-deb.sh
Executable file
@@ -0,0 +1,130 @@
|
||||
#!/bin/bash
#
# Publish .deb files to an S3-hosted apt repository.
#
# Usage: publish-deb.sh <deb-file-or-directory> [<deb-file-or-directory> ...]
#
# Environment variables:
#   GPG_PRIVATE_KEY - Armored GPG private key (imported if set)
#   GPG_KEY_ID      - GPG key ID for signing
#   S3_ACCESS_KEY   - S3 access key
#   S3_SECRET_KEY   - S3 secret key
#   S3_ENDPOINT     - S3 endpoint (default: https://nyc3.digitaloceanspaces.com)
#   S3_BUCKET       - S3 bucket name (default: start9-debs)
#   SUITE           - Apt suite name (default: stable)
#   COMPONENT       - Apt component name (default: main)

# -u and pipefail in addition to -e: an unset variable or a failure in the
# middle of a pipeline must abort the publish rather than upload a
# half-built repository.
set -euo pipefail

if [ $# -eq 0 ]; then
    echo "Usage: $0 <deb-file-or-directory> [...]" >&2
    exit 1
fi

BUCKET="${S3_BUCKET:-start9-debs}"
ENDPOINT="${S3_ENDPOINT:-https://nyc3.digitaloceanspaces.com}"
SUITE="${SUITE:-stable}"
COMPONENT="${COMPONENT:-main}"
REPO_DIR="$(mktemp -d)"
# Created later; tracked here so cleanup() can always remove it.
S3CMD_CONFIG=""

# Remove the scratch repo AND the s3cmd config on EVERY exit path.
# The config holds the S3 secret key, so it must not be left in /tmp
# when the script aborts partway through (previously it was only
# removed on the success path).
cleanup() {
    rm -rf "$REPO_DIR"
    if [ -n "$S3CMD_CONFIG" ]; then
        rm -f "$S3CMD_CONFIG"
    fi
}
trap cleanup EXIT

# Import GPG key if provided (stderr suppressed: gpg prints import chatter)
if [ -n "${GPG_PRIVATE_KEY:-}" ]; then
    echo "$GPG_PRIVATE_KEY" | gpg --batch --import 2>/dev/null
fi

# Configure s3cmd via a throw-away config file (mktemp creates it 0600)
S3CMD_CONFIG="$(mktemp)"
S3_HOST="$(echo "$ENDPOINT" | sed 's|https://||')"
cat > "$S3CMD_CONFIG" <<EOF
[default]
access_key = ${S3_ACCESS_KEY:-}
secret_key = ${S3_SECRET_KEY:-}
host_base = ${S3_HOST}
host_bucket = %(bucket)s.${S3_HOST}
use_https = True
EOF

s3() {
    s3cmd -c "$S3CMD_CONFIG" "$@"
}

# Sync existing repo from S3. Best-effort (|| true): the bucket may be
# empty on the very first publish.
echo "Syncing existing repo from s3://${BUCKET}/ ..."
s3 sync --no-mime-magic "s3://${BUCKET}/" "$REPO_DIR/" 2>/dev/null || true

# Collect all .deb files from arguments (files or directories)
DEB_FILES=()
for arg in "$@"; do
    if [ -d "$arg" ]; then
        # -print0 / read -d '' so paths with spaces survive
        while IFS= read -r -d '' f; do
            DEB_FILES+=("$f")
        done < <(find "$arg" -name '*.deb' -print0)
    elif [ -f "$arg" ]; then
        DEB_FILES+=("$arg")
    else
        echo "Warning: $arg is not a file or directory, skipping" >&2
    fi
done

if [ ${#DEB_FILES[@]} -eq 0 ]; then
    echo "No .deb files found" >&2
    exit 1
fi

# Copy each deb into the Debian pool layout: pool/<component>/<p>/<pkg>/
for deb in "${DEB_FILES[@]}"; do
    PKG_NAME="$(dpkg-deb --field "$deb" Package)"
    POOL_DIR="$REPO_DIR/pool/${COMPONENT}/${PKG_NAME:0:1}/${PKG_NAME}"
    mkdir -p "$POOL_DIR"
    cp "$deb" "$POOL_DIR/"
    echo "Added: $(basename "$deb") -> pool/${COMPONENT}/${PKG_NAME:0:1}/${PKG_NAME}/"
done

# Generate Packages indices for each architecture
for arch in amd64 arm64 riscv64; do
    BINARY_DIR="$REPO_DIR/dists/${SUITE}/${COMPONENT}/binary-${arch}"
    mkdir -p "$BINARY_DIR"
    (
        # Run from the repo root so Filename: entries are pool-relative
        cd "$REPO_DIR"
        dpkg-scanpackages --arch "$arch" pool/ > "$BINARY_DIR/Packages"
        gzip -k -f "$BINARY_DIR/Packages"
    )
    echo "Generated Packages index for ${arch}"
done

# Generate Release file (checksums over the Packages indices)
(
    cd "$REPO_DIR/dists/${SUITE}"
    apt-ftparchive release \
        -o "APT::FTPArchive::Release::Origin=Start9" \
        -o "APT::FTPArchive::Release::Label=Start9" \
        -o "APT::FTPArchive::Release::Suite=${SUITE}" \
        -o "APT::FTPArchive::Release::Codename=${SUITE}" \
        -o "APT::FTPArchive::Release::Architectures=amd64 arm64 riscv64" \
        -o "APT::FTPArchive::Release::Components=${COMPONENT}" \
        . > Release
)
echo "Generated Release file"

# Sign if GPG key is available (both detached Release.gpg and inline
# InRelease, for old and new apt clients respectively)
if [ -n "${GPG_KEY_ID:-}" ]; then
    (
        cd "$REPO_DIR/dists/${SUITE}"
        gpg --default-key "$GPG_KEY_ID" --batch --yes --detach-sign -o Release.gpg Release
        gpg --default-key "$GPG_KEY_ID" --batch --yes --clearsign -o InRelease Release
    )
    echo "Signed Release file with key ${GPG_KEY_ID}"
else
    echo "Warning: GPG_KEY_ID not set, Release file is unsigned" >&2
fi

# Upload to S3 (config file removal handled by the EXIT trap)
echo "Uploading to s3://${BUCKET}/ ..."
s3 sync --acl-public --no-mime-magic "$REPO_DIR/" "s3://${BUCKET}/"

echo "Done."
|
||||
@@ -3972,6 +3972,13 @@ about.allow-gateway-infer-inbound-access-from-wan:
|
||||
fr_FR: "Permettre à cette passerelle de déduire si elle a un accès entrant depuis le WAN en fonction de son adresse IPv4"
|
||||
pl_PL: "Pozwól tej bramce wywnioskować, czy ma dostęp przychodzący z WAN na podstawie adresu IPv4"
|
||||
|
||||
about.apply-available-update:
|
||||
en_US: "Apply available update"
|
||||
de_DE: "Verfügbares Update anwenden"
|
||||
es_ES: "Aplicar actualización disponible"
|
||||
fr_FR: "Appliquer la mise à jour disponible"
|
||||
pl_PL: "Zastosuj dostępną aktualizację"
|
||||
|
||||
about.calculate-blake3-hash-for-file:
|
||||
en_US: "Calculate blake3 hash for a file"
|
||||
de_DE: "Blake3-Hash für eine Datei berechnen"
|
||||
@@ -3993,6 +4000,13 @@ about.check-dns-configuration:
|
||||
fr_FR: "Vérifier la configuration DNS d'une passerelle"
|
||||
pl_PL: "Sprawdź konfigurację DNS bramy"
|
||||
|
||||
about.check-for-updates:
|
||||
en_US: "Check for available updates"
|
||||
de_DE: "Nach verfügbaren Updates suchen"
|
||||
es_ES: "Buscar actualizaciones disponibles"
|
||||
fr_FR: "Vérifier les mises à jour disponibles"
|
||||
pl_PL: "Sprawdź dostępne aktualizacje"
|
||||
|
||||
about.check-update-startos:
|
||||
en_US: "Check a given registry for StartOS updates and update if available"
|
||||
de_DE: "Eine bestimmte Registry auf StartOS-Updates prüfen und bei Verfügbarkeit aktualisieren"
|
||||
|
||||
@@ -53,6 +53,24 @@ pub fn tunnel_api<C: Context>() -> ParentHandler<C> {
|
||||
.with_call_remote::<CliContext>(),
|
||||
),
|
||||
)
|
||||
.subcommand(
|
||||
"update",
|
||||
ParentHandler::<C>::new()
|
||||
.subcommand(
|
||||
"check",
|
||||
from_fn_async(super::update::check_update)
|
||||
.with_display_serializable()
|
||||
.with_about("about.check-for-updates")
|
||||
.with_call_remote::<CliContext>(),
|
||||
)
|
||||
.subcommand(
|
||||
"apply",
|
||||
from_fn_async(super::update::apply_update)
|
||||
.with_display_serializable()
|
||||
.with_about("about.apply-available-update")
|
||||
.with_call_remote::<CliContext>(),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Parser)]
|
||||
|
||||
@@ -9,6 +9,7 @@ pub mod api;
|
||||
pub mod auth;
|
||||
pub mod context;
|
||||
pub mod db;
|
||||
pub mod update;
|
||||
pub mod web;
|
||||
pub mod wg;
|
||||
|
||||
|
||||
109
core/src/tunnel/update.rs
Normal file
109
core/src/tunnel/update.rs
Normal file
@@ -0,0 +1,109 @@
|
||||
use std::process::Stdio;
|
||||
|
||||
use rpc_toolkit::Empty;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::process::Command;
|
||||
use tracing::instrument;
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::prelude::*;
|
||||
use crate::tunnel::context::TunnelContext;
|
||||
use crate::util::Invoke;
|
||||
|
||||
/// Result of a tunnel update check/apply, serialized camelCase over RPC and
/// exported to TypeScript via `ts_rs` (see `export_bindings_tunnel_update`).
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct TunnelUpdateResult {
    /// "up-to-date", "update-available", or "updating"
    // NOTE(review): stringly-typed tri-state; a serde-tagged enum would let
    // the compiler and ts-rs enforce the variants — confirm before changing
    // the wire format, since the frontend already matches on these strings.
    pub status: String,
    /// Currently installed version
    pub installed: String,
    /// Available candidate version
    pub candidate: String,
}
|
||||
|
||||
#[instrument(skip_all)]
|
||||
pub async fn check_update(_ctx: TunnelContext, _: Empty) -> Result<TunnelUpdateResult, Error> {
|
||||
Command::new("apt-get")
|
||||
.arg("update")
|
||||
.invoke(ErrorKind::UpdateFailed)
|
||||
.await?;
|
||||
|
||||
let policy_output = Command::new("apt-cache")
|
||||
.arg("policy")
|
||||
.arg("start-tunnel")
|
||||
.invoke(ErrorKind::UpdateFailed)
|
||||
.await?;
|
||||
|
||||
let policy_str = String::from_utf8_lossy(&policy_output).to_string();
|
||||
let installed = parse_version_field(&policy_str, "Installed:");
|
||||
let candidate = parse_version_field(&policy_str, "Candidate:");
|
||||
|
||||
let status = if installed == candidate {
|
||||
"up-to-date"
|
||||
} else {
|
||||
"update-available"
|
||||
};
|
||||
|
||||
Ok(TunnelUpdateResult {
|
||||
status: status.to_string(),
|
||||
installed: installed.unwrap_or_default(),
|
||||
candidate: candidate.unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Apply an available `start-tunnel` update, if any.
///
/// Re-reads `apt-cache policy` and returns "up-to-date" without side
/// effects when Installed == Candidate. Otherwise kicks off the upgrade
/// in a detached systemd scope and returns "updating" immediately.
///
/// NOTE(review): this does not run `apt-get update` first — it relies on
/// the indices refreshed by a prior `check_update` call; confirm intended.
/// Also, once spawned, failures of the upgrade/reboot are not observable
/// from this RPC.
#[instrument(skip_all)]
pub async fn apply_update(_ctx: TunnelContext, _: Empty) -> Result<TunnelUpdateResult, Error> {
    let policy_output = Command::new("apt-cache")
        .arg("policy")
        .arg("start-tunnel")
        .invoke(ErrorKind::UpdateFailed)
        .await?;

    let policy_str = String::from_utf8_lossy(&policy_output).to_string();
    let installed = parse_version_field(&policy_str, "Installed:");
    let candidate = parse_version_field(&policy_str, "Candidate:");

    // Nothing to do — report current state without touching the system.
    if installed == candidate {
        return Ok(TunnelUpdateResult {
            status: "up-to-date".to_string(),
            installed: installed.unwrap_or_default(),
            candidate: candidate.unwrap_or_default(),
        });
    }

    // Spawn in a separate cgroup via systemd-run so the process survives
    // when the postinst script restarts start-tunneld.service.
    // After the install completes, reboot the system.
    Command::new("systemd-run")
        .arg("--scope")
        .arg("--")
        .arg("sh")
        .arg("-c")
        .arg("apt-get install --only-upgrade -y start-tunnel && reboot")
        // Prevent dpkg/apt prompts from blocking the unattended upgrade.
        .env("DEBIAN_FRONTEND", "noninteractive")
        // Detach stdio: the scope outlives this daemon, so nothing may
        // hold pipes back to it.
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .spawn()
        .with_kind(ErrorKind::UpdateFailed)?;

    // Upgrade is now running in the background; report "updating" with
    // the versions observed before the install started.
    Ok(TunnelUpdateResult {
        status: "updating".to_string(),
        installed: installed.unwrap_or_default(),
        candidate: candidate.unwrap_or_default(),
    })
}
|
||||
|
||||
/// Extract the version following `field` (e.g. `"Installed:"`) from
/// `apt-cache policy` output.
///
/// Returns `None` when the field is absent or apt reports `(none)`
/// (package not installed / no candidate available).
fn parse_version_field(policy: &str, field: &str) -> Option<String> {
    policy
        .lines()
        // strip_prefix (instead of the previous starts_with +
        // split_whitespace().nth(1)) also handles a value glued to the
        // label with no intervening space.
        .filter_map(|l| l.trim().strip_prefix(field))
        .next()
        .and_then(|rest| rest.split_whitespace().next())
        .filter(|v| *v != "(none)")
        .map(str::to_string)
}
|
||||
|
||||
// Regenerates the TypeScript binding for TunnelUpdateResult into
// bindings/tunnel/ (run as part of `cargo test`); keep in sync with the
// checked-in copy under the SDK's osBindings.
#[test]
fn export_bindings_tunnel_update() {
    TunnelUpdateResult::export_all_to("bindings/tunnel").unwrap();
}
|
||||
16
sdk/base/lib/osBindings/tunnel/TunnelUpdateResult.ts
Normal file
16
sdk/base/lib/osBindings/tunnel/TunnelUpdateResult.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
// Source of truth: `TunnelUpdateResult` in core/src/tunnel/update.rs,
// regenerated by the `export_bindings_tunnel_update` cargo test
// (presumably copied into osBindings by the build — confirm pipeline).

export type TunnelUpdateResult = {
  /**
   * "up-to-date", "update-available", or "updating"
   */
  status: string
  /**
   * Currently installed version
   */
  installed: string
  /**
   * Available candidate version
   */
  candidate: string
}
|
||||
@@ -25,6 +25,9 @@ export abstract class ApiService {
|
||||
// forwards
|
||||
abstract addForward(params: AddForwardReq): Promise<null> // port-forward.add
|
||||
abstract deleteForward(params: DeleteForwardReq): Promise<null> // port-forward.remove
|
||||
// update (OTA for start-tunnel via apt; backend in core/src/tunnel/update.rs)
abstract checkUpdate(): Promise<TunnelUpdateResult> // update.check
abstract applyUpdate(): Promise<TunnelUpdateResult> // update.apply
|
||||
}
|
||||
|
||||
export type SubscribeRes = {
|
||||
@@ -62,3 +65,9 @@ export type AddForwardReq = {
|
||||
/** Request body for `port-forward.remove`. */
export type DeleteForwardReq = {
  // NOTE(review): field name suggests the forward's source address/port
  // identifier — confirm against the backend handler.
  source: string
}
|
||||
|
||||
// NOTE(review): hand-written duplicate of the generated binding in
// osBindings/tunnel/TunnelUpdateResult.ts — consider re-exporting that
// type instead so the two definitions cannot drift.
export type TunnelUpdateResult = {
  /** "up-to-date", "update-available", or "updating" */
  status: string
  /** currently installed version */
  installed: string
  /** available candidate version */
  candidate: string
}
|
||||
|
||||
@@ -16,6 +16,7 @@ import {
|
||||
DeleteSubnetReq,
|
||||
LoginReq,
|
||||
SubscribeRes,
|
||||
TunnelUpdateResult,
|
||||
UpsertDeviceReq,
|
||||
UpsertSubnetReq,
|
||||
} from './api.service'
|
||||
@@ -103,6 +104,16 @@ export class LiveApiService extends ApiService {
|
||||
return this.rpcRequest({ method: 'port-forward.remove', params })
|
||||
}
|
||||
|
||||
// update

/**
 * `update.check` RPC: the backend refreshes apt indices and reports
 * installed vs. candidate start-tunnel versions
 * (see core/src/tunnel/update.rs).
 */
async checkUpdate(): Promise<TunnelUpdateResult> {
  return this.rpcRequest({ method: 'update.check', params: {} })
}
|
||||
|
||||
/**
 * `update.apply` RPC. When an update exists the backend upgrades and then
 * reboots (see core/src/tunnel/update.rs), so callers should expect the
 * connection to drop shortly after a successful "updating" response.
 */
async applyUpdate(): Promise<TunnelUpdateResult> {
  return this.rpcRequest({ method: 'update.apply', params: {} })
}
||||
|
||||
// private
|
||||
|
||||
private async upsertSubnet(params: UpsertSubnetReq): Promise<null> {
|
||||
|
||||
@@ -9,6 +9,7 @@ import {
|
||||
DeleteSubnetReq,
|
||||
LoginReq,
|
||||
SubscribeRes,
|
||||
TunnelUpdateResult,
|
||||
UpsertDeviceReq,
|
||||
UpsertSubnetReq,
|
||||
} from './api.service'
|
||||
@@ -196,6 +197,24 @@ export class MockApiService extends ApiService {
|
||||
return null
|
||||
}
|
||||
|
||||
async checkUpdate(): Promise<TunnelUpdateResult> {
|
||||
await pauseFor(1000)
|
||||
return {
|
||||
status: 'update-available',
|
||||
installed: '0.4.0-alpha.19',
|
||||
candidate: '0.4.0-alpha.20',
|
||||
}
|
||||
}
|
||||
|
||||
async applyUpdate(): Promise<TunnelUpdateResult> {
|
||||
await pauseFor(2000)
|
||||
return {
|
||||
status: 'updating',
|
||||
installed: '0.4.0-alpha.19',
|
||||
candidate: '0.4.0-alpha.20',
|
||||
}
|
||||
}
|
||||
|
||||
private async mockRevision<T>(patch: Operation<T>[]): Promise<void> {
|
||||
const revision = {
|
||||
id: ++this.sequence,
|
||||
|
||||
Reference in New Issue
Block a user