Feature/cloud backups (#889)

* cifs for cloud backups on lan

* password spelling fix

* fix spelling and fix rpc method

* fix other methods

* remove old code and rename method

* add support for cifs backup targets

wip

cifs api

simplify idiom

add doc comment

wip

wip

should work™

* add password hash to server info

* fix type

* fix types for cifs

* minor fixes for cifs feature

* fix rpc structure

* fix copy, address some TODOs

* add subcommand

* backup path and navigation

* wizard edits

* rebased success page

* wiz conflicts resolved

* current change actually

* only unsub if done

* no filter if necessary

* fix copy for cifs old password

* setup complete (#913)

* setup complete

* minor fixes

* setup.complete

* complete bool

* setup-wizard: complete boolean

Co-authored-by: Matt Hill <matthewonthemoon@gmail.com>
Co-authored-by: Drew Ansbacher <drew.ansbacher@spiredigital.com>
Co-authored-by: Matt Hill <MattDHill@users.noreply.github.com>
This commit is contained in:
Aiden McClelland
2021-12-07 11:51:04 -07:00
parent 6ee0bf8636
commit e6fb74a800
140 changed files with 3968 additions and 2399 deletions

View File

@@ -47,4 +47,12 @@ CREATE TABLE IF NOT EXISTS notifications
title TEXT NOT NULL,
message TEXT NOT NULL,
data TEXT
);
CREATE TABLE IF NOT EXISTS cifs_shares
(
id INTEGER PRIMARY KEY,
hostname TEXT NOT NULL,
path TEXT NOT NULL,
username TEXT NOT NULL,
password TEXT
);

View File

@@ -40,6 +40,24 @@
"nullable": []
}
},
"1b2242afa55e730b37b00929b656d80940b457ec86c234ddd0de917bd8872611": {
"query": "INSERT INTO cifs_shares (hostname, path, username, password) VALUES (?, ?, ?, ?) RETURNING id AS \"id: u32\"",
"describe": {
"columns": [
{
"name": "id: u32",
"ordinal": 0,
"type_info": "Null"
}
],
"parameters": {
"Right": 4
},
"nullable": [
false
]
}
},
"1eee1fdc793919c391008854407143d7a11b4668486c11a760b49af49992f9f8": {
"query": "REPLACE INTO tor (package, interface, key) VALUES (?, 'main', ?)",
"describe": {
@@ -262,6 +280,48 @@
]
}
},
"668f39c868f90cdbcc635858bac9e55ed73192ed2aec5c52dcfba9800a7a4a41": {
"query": "SELECT id AS \"id: u32\", hostname, path, username, password FROM cifs_shares",
"describe": {
"columns": [
{
"name": "id: u32",
"ordinal": 0,
"type_info": "Int64"
},
{
"name": "hostname",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "path",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "username",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "password",
"ordinal": 4,
"type_info": "Text"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false,
true
]
}
},
"6b9abc9e079cff975f8a7f07ff70548c7877ecae3be0d0f2d3f439a6713326c0": {
"query": "DELETE FROM notifications WHERE id < ?",
"describe": {
@@ -374,12 +434,12 @@
"nullable": []
}
},
"a596bdc5014ba9e7b362398abf09ec6a100923e001247a79503d1e820ffe71c3": {
"query": "-- Add migration script here\nCREATE TABLE IF NOT EXISTS tor\n(\n package TEXT NOT NULL,\n interface TEXT NOT NULL,\n key BLOB NOT NULL CHECK (length(key) = 64),\n PRIMARY KEY (package, interface)\n);\nCREATE TABLE IF NOT EXISTS session\n(\n id TEXT NOT NULL PRIMARY KEY,\n logged_in TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n logged_out TIMESTAMP,\n last_active TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n user_agent TEXT,\n metadata TEXT NOT NULL DEFAULT 'null'\n);\nCREATE TABLE IF NOT EXISTS account\n(\n id INTEGER PRIMARY KEY CHECK (id = 0),\n password TEXT NOT NULL,\n tor_key BLOB NOT NULL CHECK (length(tor_key) = 64)\n);\nCREATE TABLE IF NOT EXISTS ssh_keys\n(\n fingerprint TEXT NOT NULL,\n openssh_pubkey TEXT NOT NULL,\n created_at TEXT NOT NULL,\n PRIMARY KEY (fingerprint)\n);\nCREATE TABLE IF NOT EXISTS certificates\n(\n id INTEGER PRIMARY KEY, -- Root = 0, Int = 1, Other = 2..\n priv_key_pem TEXT NOT NULL,\n certificate_pem TEXT NOT NULL,\n lookup_string TEXT UNIQUE,\n created_at TEXT,\n updated_at TEXT\n);\nCREATE TABLE IF NOT EXISTS notifications\n(\n id INTEGER PRIMARY KEY,\n package_id TEXT,\n created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n code INTEGER NOT NULL,\n level TEXT NOT NULL,\n title TEXT NOT NULL,\n message TEXT NOT NULL,\n data TEXT\n);",
"a4e7162322b28508310b9de7ebc891e619b881ff6d3ea09eba13da39626ab12f": {
"query": "UPDATE cifs_shares SET hostname = ?, path = ?, username = ?, password = ? WHERE id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 0
"Right": 5
},
"nullable": []
}
@@ -474,6 +534,52 @@
]
}
},
"b376d9e77e0861a9af2d1081ca48d14e83abc5a1546213d15bb570972c403beb": {
"query": "-- Add migration script here\nCREATE TABLE IF NOT EXISTS tor\n(\n package TEXT NOT NULL,\n interface TEXT NOT NULL,\n key BLOB NOT NULL CHECK (length(key) = 64),\n PRIMARY KEY (package, interface)\n);\nCREATE TABLE IF NOT EXISTS session\n(\n id TEXT NOT NULL PRIMARY KEY,\n logged_in TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n logged_out TIMESTAMP,\n last_active TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n user_agent TEXT,\n metadata TEXT NOT NULL DEFAULT 'null'\n);\nCREATE TABLE IF NOT EXISTS account\n(\n id INTEGER PRIMARY KEY CHECK (id = 0),\n password TEXT NOT NULL,\n tor_key BLOB NOT NULL CHECK (length(tor_key) = 64)\n);\nCREATE TABLE IF NOT EXISTS ssh_keys\n(\n fingerprint TEXT NOT NULL,\n openssh_pubkey TEXT NOT NULL,\n created_at TEXT NOT NULL,\n PRIMARY KEY (fingerprint)\n);\nCREATE TABLE IF NOT EXISTS certificates\n(\n id INTEGER PRIMARY KEY, -- Root = 0, Int = 1, Other = 2..\n priv_key_pem TEXT NOT NULL,\n certificate_pem TEXT NOT NULL,\n lookup_string TEXT UNIQUE,\n created_at TEXT,\n updated_at TEXT\n);\nCREATE TABLE IF NOT EXISTS notifications\n(\n id INTEGER PRIMARY KEY,\n package_id TEXT,\n created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n code INTEGER NOT NULL,\n level TEXT NOT NULL,\n title TEXT NOT NULL,\n message TEXT NOT NULL,\n data TEXT\n);\nCREATE TABLE IF NOT EXISTS cifs_shares\n(\n id INTEGER PRIMARY KEY,\n hostname TEXT NOT NULL,\n path TEXT NOT NULL,\n username TEXT NOT NULL,\n password TEXT\n);",
"describe": {
"columns": [],
"parameters": {
"Right": 0
},
"nullable": []
}
},
"cc33fe2958fe7caeac6999a217f918a68b45ad596664170b4d07671c6ea49566": {
"query": "SELECT hostname, path, username, password FROM cifs_shares WHERE id = ?",
"describe": {
"columns": [
{
"name": "hostname",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "path",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "username",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "password",
"ordinal": 3,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
true
]
}
},
"d5117054072476377f3c4f040ea429d4c9b2cf534e76f35c80a2bf60e8599cca": {
"query": "SELECT openssh_pubkey FROM ssh_keys",
"describe": {
@@ -565,5 +671,15 @@
false
]
}
},
"f63c8c5a8754b34a49ef5d67802fa2b72aa409bbec92ecc6901492092974b71a": {
"query": "DELETE FROM cifs_shares WHERE id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
}
}
}

View File

@@ -5,7 +5,6 @@ use std::net::Ipv4Addr;
use std::path::PathBuf;
use std::time::Duration;
use bollard::container::StopContainerOptions;
use futures::future::Either as EitherFuture;
use nix::sys::signal;
use nix::unistd::Pid;
@@ -16,7 +15,8 @@ use tracing::instrument;
use crate::context::RpcContext;
use crate::id::{Id, ImageId};
use crate::s9pk::manifest::{PackageId, SYSTEM_PACKAGE_ID};
use crate::util::{IoFormat, Version};
use crate::util::serde::IoFormat;
use crate::util::Version;
use crate::volume::{VolumeId, Volumes};
use crate::{Error, ResultExt, HOST_IP};

View File

@@ -16,7 +16,8 @@ use crate::config::{Config, ConfigSpec};
use crate::context::RpcContext;
use crate::id::{Id, InvalidId};
use crate::s9pk::manifest::PackageId;
use crate::util::{display_serializable, parse_stdin_deserializable, IoFormat, Version};
use crate::util::serde::{display_serializable, parse_stdin_deserializable, IoFormat};
use crate::util::Version;
use crate::volume::Volumes;
use crate::{Error, ResultExt};

View File

@@ -14,7 +14,8 @@ use tracing::instrument;
use crate::context::{CliContext, RpcContext};
use crate::middleware::auth::{AsLogoutSessionId, HasLoggedOutSessions, HashSessionToken};
use crate::util::{display_none, display_serializable, IoFormat};
use crate::util::display_none;
use crate::util::serde::{display_serializable, IoFormat};
use crate::{ensure_code, Error, ResultExt};
#[command(subcommands(login, logout, session))]

View File

@@ -1,5 +1,4 @@
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::Arc;
use chrono::Utc;
@@ -14,17 +13,20 @@ use tokio::io::AsyncWriteExt;
use torut::onion::TorSecretKeyV3;
use tracing::instrument;
use super::target::BackupTargetId;
use super::PackageBackupReport;
use crate::auth::check_password_against_db;
use crate::backup::{BackupReport, ServerBackupReport};
use crate::context::RpcContext;
use crate::db::model::ServerStatus;
use crate::db::util::WithRevision;
use crate::disk::util::{BackupMountGuard, TmpMountGuard};
use crate::disk::mount::backup::BackupMountGuard;
use crate::disk::mount::guard::TmpMountGuard;
use crate::notifications::NotificationLevel;
use crate::s9pk::manifest::PackageId;
use crate::status::MainStatus;
use crate::util::{display_none, AtomicFile, IoFormat};
use crate::util::serde::IoFormat;
use crate::util::{display_none, AtomicFile};
use crate::version::VersionT;
use crate::Error;
@@ -112,14 +114,17 @@ impl Serialize for OsBackup {
#[instrument(skip(ctx, old_password, password))]
pub async fn backup_all(
#[context] ctx: RpcContext,
#[arg] logicalname: PathBuf,
#[arg(rename = "target-id")] target_id: BackupTargetId,
#[arg(rename = "old-password", long = "old-password")] old_password: Option<String>,
#[arg] password: String,
) -> Result<WithRevision<()>, Error> {
let mut db = ctx.db.handle();
check_password_against_db(&mut ctx.secret_store.acquire().await?, &password).await?;
let fs = target_id
.load(&mut ctx.secret_store.acquire().await?)
.await?;
let mut backup_guard = BackupMountGuard::mount(
TmpMountGuard::mount(&logicalname, None).await?,
TmpMountGuard::mount(&fs).await?,
old_password.as_ref().unwrap_or(&password),
)
.await?;

View File

@@ -11,19 +11,21 @@ use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use tracing::instrument;
use self::target::PackageBackupInfo;
use crate::action::{ActionImplementation, NoOutput};
use crate::context::RpcContext;
use crate::disk::PackageBackupInfo;
use crate::install::PKG_ARCHIVE_DIR;
use crate::net::interface::{InterfaceId, Interfaces};
use crate::s9pk::manifest::PackageId;
use crate::util::{AtomicFile, IoFormat, Version};
use crate::util::serde::IoFormat;
use crate::util::{AtomicFile, Version};
use crate::version::{Current, VersionT};
use crate::volume::{backup_dir, Volume, VolumeId, Volumes, BACKUP_DIR};
use crate::{Error, ResultExt};
pub mod backup_bulk;
pub mod restore;
pub mod target;
#[derive(Debug, Deserialize, Serialize)]
pub struct BackupReport {
@@ -42,7 +44,7 @@ pub struct PackageBackupReport {
error: Option<String>,
}
#[command(subcommands(backup_bulk::backup_all))]
#[command(subcommands(backup_bulk::backup_all, target::target))]
pub fn backup() -> Result<(), Error> {
Ok(())
}

View File

@@ -1,5 +1,5 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::path::Path;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::time::Duration;
@@ -16,20 +16,23 @@ use tokio::task::JoinHandle;
use torut::onion::OnionAddressV3;
use tracing::instrument;
use super::target::BackupTargetId;
use crate::auth::check_password_against_db;
use crate::backup::backup_bulk::OsBackup;
use crate::context::{RpcContext, SetupContext};
use crate::db::model::{PackageDataEntry, StaticFiles};
use crate::db::util::WithRevision;
use crate::disk::util::{BackupMountGuard, PackageBackupMountGuard, PartitionInfo, TmpMountGuard};
use crate::disk::mount::backup::{BackupMountGuard, PackageBackupMountGuard};
use crate::disk::mount::guard::TmpMountGuard;
use crate::install::progress::InstallProgress;
use crate::install::{download_install_s9pk, PKG_PUBLIC_DIR};
use crate::net::ssl::SslManager;
use crate::s9pk::manifest::{Manifest, PackageId};
use crate::s9pk::reader::S9pkReader;
use crate::setup::RecoveryStatus;
use crate::util::display_none;
use crate::util::io::dir_size;
use crate::util::{display_none, IoFormat};
use crate::util::serde::IoFormat;
use crate::volume::{backup_dir, BACKUP_DIR, PKG_VOLUME_DIR};
use crate::{Error, ResultExt};
@@ -44,14 +47,17 @@ fn parse_comma_separated(arg: &str, _: &ArgMatches<'_>) -> Result<Vec<PackageId>
pub async fn restore_packages_rpc(
#[context] ctx: RpcContext,
#[arg(parse(parse_comma_separated))] ids: Vec<PackageId>,
#[arg] logicalname: PathBuf,
#[arg(rename = "target-id")] target_id: BackupTargetId,
#[arg(rename = "old-password", long = "old-password")] old_password: Option<String>,
#[arg] password: String,
) -> Result<WithRevision<()>, Error> {
let mut db = ctx.db.handle();
check_password_against_db(&mut ctx.secret_store.acquire().await?, &password).await?;
let fs = target_id
.load(&mut ctx.secret_store.acquire().await?)
.await?;
let mut backup_guard = BackupMountGuard::mount(
TmpMountGuard::mount(&logicalname, None).await?,
TmpMountGuard::mount(&fs).await?,
old_password.as_ref().unwrap_or(&password),
)
.await?;
@@ -140,6 +146,7 @@ impl ProgressInfo {
RecoveryStatus {
total_bytes,
bytes_transferred,
complete: false,
}
}
}
@@ -149,11 +156,11 @@ pub async fn recover_full_embassy(
ctx: SetupContext,
disk_guid: Arc<String>,
embassy_password: String,
recovery_partition: PartitionInfo,
recovery_source: TmpMountGuard,
recovery_password: Option<String>,
) -> Result<(OnionAddressV3, X509, BoxFuture<'static, Result<(), Error>>), Error> {
let backup_guard = BackupMountGuard::mount(
TmpMountGuard::mount(&recovery_partition.logicalname, None).await?,
recovery_source,
recovery_password.as_deref().unwrap_or_default(),
)
.await?;

View File

@@ -0,0 +1,211 @@
use std::path::PathBuf;
use color_eyre::eyre::eyre;
use futures::TryStreamExt;
use rpc_toolkit::command;
use serde::{Deserialize, Serialize};
use sqlx::{Executor, Sqlite};
use super::{BackupTarget, BackupTargetId};
use crate::context::RpcContext;
use crate::disk::mount::filesystem::cifs::Cifs;
use crate::disk::mount::guard::TmpMountGuard;
use crate::disk::util::{recovery_info, EmbassyOsRecoveryInfo};
use crate::util::display_none;
use crate::util::serde::KeyVal;
use crate::Error;
/// RPC-facing representation of a CIFS (SMB) network share configured as a
/// backup target. Serialized in kebab-case for the API layer.
///
/// Note: the share password is not exposed in this type — it lives only in
/// the secret store (`cifs_shares` table).
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct CifsBackupTarget {
    // Host (name or address) exporting the share.
    hostname: String,
    // Path of the share on the remote host.
    path: PathBuf,
    // Username used to authenticate against the share.
    username: String,
    // Whether the share could be mounted when it was last probed.
    mountable: bool,
    // Recovery metadata found on the share, if it holds an EmbassyOS backup.
    embassy_os: Option<EmbassyOsRecoveryInfo>,
}
/// `backup target cifs` subcommand group: manage CIFS backup targets
/// (see `add`, `update`, `remove`).
#[command(subcommands(add, update, remove))]
pub fn cifs() -> Result<(), Error> {
    Ok(())
}
/// Register a new CIFS share as a backup target.
///
/// The share is test-mounted first, and probed for existing EmbassyOS backup
/// metadata, before anything is persisted — so an unreachable or
/// misconfigured share is rejected up front. On success the credentials are
/// inserted into the `cifs_shares` table of the secret store and the newly
/// assigned row id becomes the target's `BackupTargetId::Cifs { id }`.
#[command(display(display_none))]
pub async fn add(
    #[context] ctx: RpcContext,
    #[arg] hostname: String,
    #[arg] path: PathBuf,
    #[arg] username: String,
    #[arg] password: Option<String>,
) -> Result<KeyVal<BackupTargetId, BackupTarget>, Error> {
    let cifs = Cifs {
        hostname,
        path,
        username,
        password,
    };
    // Verify the share is actually mountable and check for an existing backup.
    let guard = TmpMountGuard::mount(&cifs).await?;
    let embassy_os = recovery_info(&guard).await?;
    guard.unmount().await?;
    let path_string = cifs.path.display().to_string();
    // RETURNING gives us the autoincremented row id in the same statement.
    let id: u32 = sqlx::query!(
        "INSERT INTO cifs_shares (hostname, path, username, password) VALUES (?, ?, ?, ?) RETURNING id AS \"id: u32\"",
        cifs.hostname,
        path_string,
        cifs.username,
        cifs.password,
    )
    .fetch_one(&ctx.secret_store)
    .await?.id;
    Ok(KeyVal {
        key: BackupTargetId::Cifs { id },
        value: BackupTarget::Cifs(CifsBackupTarget {
            hostname: cifs.hostname,
            path: cifs.path,
            username: cifs.username,
            // We just mounted it successfully above.
            mountable: true,
            embassy_os,
        }),
    })
}
/// Update the settings of an existing CIFS backup target.
///
/// Fails with `NotFound` if `id` is not a CIFS target id, or if no row with
/// that id exists. The new settings are verified by test-mounting the share
/// (and probing it for EmbassyOS backup metadata) before the database row is
/// rewritten.
#[command(display(display_none))]
pub async fn update(
    #[context] ctx: RpcContext,
    #[arg] id: BackupTargetId,
    #[arg] hostname: String,
    #[arg] path: PathBuf,
    #[arg] username: String,
    #[arg] password: Option<String>,
) -> Result<KeyVal<BackupTargetId, BackupTarget>, Error> {
    // Only CIFS targets can be updated here; disk targets have no stored row.
    let id = if let BackupTargetId::Cifs { id } = id {
        id
    } else {
        return Err(Error::new(
            eyre!("Backup Target ID {} Not Found", id),
            crate::ErrorKind::NotFound,
        ));
    };
    let cifs = Cifs {
        hostname,
        path,
        username,
        password,
    };
    // Verify the new settings before persisting them.
    let guard = TmpMountGuard::mount(&cifs).await?;
    let embassy_os = recovery_info(&guard).await?;
    guard.unmount().await?;
    let path_string = cifs.path.display().to_string();
    // rows_affected() == 0 means the id did not match any stored share.
    if sqlx::query!(
        "UPDATE cifs_shares SET hostname = ?, path = ?, username = ?, password = ? WHERE id = ?",
        cifs.hostname,
        path_string,
        cifs.username,
        cifs.password,
        id,
    )
    .execute(&ctx.secret_store)
    .await?
    .rows_affected()
        == 0
    {
        return Err(Error::new(
            eyre!("Backup Target ID {} Not Found", BackupTargetId::Cifs { id }),
            crate::ErrorKind::NotFound,
        ));
    };
    Ok(KeyVal {
        key: BackupTargetId::Cifs { id },
        value: BackupTarget::Cifs(CifsBackupTarget {
            hostname: cifs.hostname,
            path: cifs.path,
            username: cifs.username,
            // We just mounted it successfully above.
            mountable: true,
            embassy_os,
        }),
    })
}
/// Delete a CIFS backup target from the secret store.
///
/// Fails with `NotFound` if `id` is not a CIFS target id, or if no row with
/// that id exists (detected via `rows_affected() == 0`).
#[command(display(display_none))]
pub async fn remove(#[context] ctx: RpcContext, #[arg] id: BackupTargetId) -> Result<(), Error> {
    // Only CIFS targets are stored in the database and can be removed.
    let id = if let BackupTargetId::Cifs { id } = id {
        id
    } else {
        return Err(Error::new(
            eyre!("Backup Target ID {} Not Found", id),
            crate::ErrorKind::NotFound,
        ));
    };
    if sqlx::query!("DELETE FROM cifs_shares WHERE id = ?", id)
        .execute(&ctx.secret_store)
        .await?
        .rows_affected()
        == 0
    {
        return Err(Error::new(
            eyre!("Backup Target ID {} Not Found", BackupTargetId::Cifs { id }),
            crate::ErrorKind::NotFound,
        ));
    };
    Ok(())
}
/// Load the mount parameters (hostname, path, credentials) of the CIFS share
/// with the given row id from the secret store.
///
/// Errors if the row does not exist (propagated from `fetch_one`).
pub async fn load<Ex>(secrets: &mut Ex, id: u32) -> Result<Cifs, Error>
where
    for<'a> &'a mut Ex: Executor<'a, Database = Sqlite>,
{
    let record = sqlx::query!(
        "SELECT hostname, path, username, password FROM cifs_shares WHERE id = ?",
        id
    )
    .fetch_one(secrets)
    .await?;
    Ok(Cifs {
        hostname: record.hostname,
        // Stored as TEXT in SQLite; converted back to a path here.
        path: PathBuf::from(record.path),
        username: record.username,
        password: record.password,
    })
}
/// List all configured CIFS shares as `(id, CifsBackupTarget)` pairs.
///
/// Each share is probed by actually mounting it: `mountable` reflects whether
/// the mount (and metadata read) succeeded, and `embassy_os` carries any
/// EmbassyOS backup metadata found on the share. A share that fails to mount
/// is still listed, just marked unmountable.
/// NOTE(review): shares are probed sequentially, one mount at a time — with
/// many/slow shares this call can take a while.
pub async fn list<Ex>(secrets: &mut Ex) -> Result<Vec<(u32, CifsBackupTarget)>, Error>
where
    for<'a> &'a mut Ex: Executor<'a, Database = Sqlite>,
{
    let mut records = sqlx::query!(
        "SELECT id AS \"id: u32\", hostname, path, username, password FROM cifs_shares"
    )
    .fetch_many(secrets);
    let mut cifs = Vec::new();
    while let Some(query_result) = records.try_next().await? {
        // fetch_many yields Either<QueryResult, Row>; only rows matter here.
        if let Some(record) = query_result.right() {
            let mount_info = Cifs {
                hostname: record.hostname,
                path: PathBuf::from(record.path),
                username: record.username,
                password: record.password,
            };
            // Probe the share; capture the Result so a failed mount does not
            // abort the whole listing.
            let embassy_os = async {
                let guard = TmpMountGuard::mount(&mount_info).await?;
                let embassy_os = recovery_info(&guard).await?;
                guard.unmount().await?;
                Ok::<_, Error>(embassy_os)
            }
            .await;
            cifs.push((
                record.id,
                CifsBackupTarget {
                    hostname: mount_info.hostname,
                    path: mount_info.path,
                    username: mount_info.username,
                    mountable: embassy_os.is_ok(),
                    // Flatten Result<Option<_>> -> Option<_>.
                    embassy_os: embassy_os.ok().and_then(|a| a),
                },
            ));
        }
    }
    Ok(cifs)
}

View File

@@ -0,0 +1,239 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use clap::ArgMatches;
use color_eyre::eyre::eyre;
use digest::generic_array::GenericArray;
use digest::Digest;
use rpc_toolkit::command;
use serde::{Deserialize, Serialize};
use sha2::Sha256;
use sqlx::{Executor, Sqlite};
use tracing::instrument;
use self::cifs::CifsBackupTarget;
use crate::context::RpcContext;
use crate::disk::mount::backup::BackupMountGuard;
use crate::disk::mount::filesystem::block_dev::BlockDev;
use crate::disk::mount::filesystem::cifs::Cifs;
use crate::disk::mount::filesystem::FileSystem;
use crate::disk::mount::guard::TmpMountGuard;
use crate::disk::util::PartitionInfo;
use crate::s9pk::manifest::PackageId;
use crate::util::serde::{deserialize_from_str, display_serializable, serialize_display};
use crate::util::Version;
use crate::Error;
pub mod cifs;
/// A place a backup can be written to, as presented over the API.
///
/// Internally tagged (`"type": "disk" | "cifs"`), kebab-case.
#[derive(Debug, Deserialize, Serialize)]
#[serde(tag = "type")]
#[serde(rename_all = "kebab-case")]
pub enum BackupTarget {
    /// A locally attached block-device partition.
    #[serde(rename_all = "kebab-case")]
    Disk {
        vendor: Option<String>,
        model: Option<String>,
        // Flattened so the partition fields appear inline in the JSON object.
        #[serde(flatten)]
        partition_info: PartitionInfo,
    },
    /// A CIFS (SMB) network share.
    Cifs(CifsBackupTarget),
}
/// Identifier for a backup target, rendered as `disk-<logicalname>` or
/// `cifs-<id>` (see the `Display`/`FromStr` impls below).
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum BackupTargetId {
    Disk { logicalname: PathBuf },
    Cifs { id: u32 },
}
impl BackupTargetId {
    /// Resolve this id into a mountable filesystem description.
    ///
    /// Disk targets map directly to a `BlockDev`; CIFS targets are looked up
    /// in the secret store (errors if the id has no stored share).
    pub async fn load<Ex>(self, secrets: &mut Ex) -> Result<BackupTargetFS, Error>
    where
        for<'a> &'a mut Ex: Executor<'a, Database = Sqlite>,
    {
        Ok(match self {
            BackupTargetId::Disk { logicalname } => {
                BackupTargetFS::Disk(BlockDev::new(logicalname))
            }
            BackupTargetId::Cifs { id } => BackupTargetFS::Cifs(cifs::load(secrets, id).await?),
        })
    }
}
impl std::fmt::Display for BackupTargetId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
BackupTargetId::Disk { logicalname } => write!(f, "disk-{}", logicalname.display()),
BackupTargetId::Cifs { id } => write!(f, "cifs-{}", id),
}
}
}
impl std::str::FromStr for BackupTargetId {
    type Err = Error;
    /// Parse `disk-<logicalname>` or `cifs-<id>`.
    ///
    /// Splits on the *first* `-`, so logicalnames containing further hyphens
    /// are preserved intact. Anything else is `InvalidBackupTargetId`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.split_once("-") {
            Some(("disk", logicalname)) => Ok(BackupTargetId::Disk {
                logicalname: Path::new(logicalname).to_owned(),
            }),
            Some(("cifs", id)) => Ok(BackupTargetId::Cifs { id: id.parse()? }),
            _ => Err(Error::new(
                eyre!("Invalid Backup Target ID"),
                crate::ErrorKind::InvalidBackupTargetId,
            )),
        }
    }
}
// Deserialized from its string form via FromStr (e.g. "cifs-3").
impl<'de> Deserialize<'de> for BackupTargetId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
// Serialized as its Display string (e.g. "disk-/dev/sda1").
impl Serialize for BackupTargetId {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}
/// The concrete, mountable filesystem behind a backup target id — either a
/// local block device or a CIFS share with credentials attached.
#[derive(Debug, Deserialize, Serialize)]
#[serde(tag = "type")]
#[serde(rename_all = "kebab-case")]
pub enum BackupTargetFS {
    Disk(BlockDev<PathBuf>),
    Cifs(Cifs),
}
// FileSystem is implemented by straight delegation to whichever variant is
// held, so a BackupTargetFS can be passed anywhere a FileSystem is expected
// (e.g. TmpMountGuard::mount).
#[async_trait]
impl FileSystem for BackupTargetFS {
    async fn mount<P: AsRef<Path> + Send + Sync>(&self, mountpoint: P) -> Result<(), Error> {
        match self {
            BackupTargetFS::Disk(a) => a.mount(mountpoint).await,
            BackupTargetFS::Cifs(a) => a.mount(mountpoint).await,
        }
    }
    // Stable hash identifying the mount source (used to key mounts).
    async fn source_hash(&self) -> Result<GenericArray<u8, <Sha256 as Digest>::OutputSize>, Error> {
        match self {
            BackupTargetFS::Disk(a) => a.source_hash().await,
            BackupTargetFS::Cifs(a) => a.source_hash().await,
        }
    }
}
/// `backup target` subcommand group: manage and inspect backup targets
/// (`cifs`, `list`, `info`).
#[command(subcommands(cifs::cifs, list, info))]
pub fn target() -> Result<(), Error> {
    Ok(())
}
/// List every available backup target, keyed by `BackupTargetId`.
///
/// Disk targets are enumerated live (one entry per partition of each disk);
/// CIFS targets come from the secret store. The two lookups run concurrently
/// via `try_join!`.
#[command(display(display_serializable))]
pub async fn list(
    #[context] ctx: RpcContext,
) -> Result<BTreeMap<BackupTargetId, BackupTarget>, Error> {
    let mut sql_handle = ctx.secret_store.acquire().await?;
    let (disks, cifs) = tokio::try_join!(crate::disk::util::list(), cifs::list(&mut sql_handle),)?;
    Ok(disks
        .into_iter()
        .flat_map(|mut disk| {
            // Take the partitions out so `disk` can still be borrowed for its
            // vendor/model while each partition is moved into its entry.
            std::mem::take(&mut disk.partitions)
                .into_iter()
                .map(|part| {
                    (
                        BackupTargetId::Disk {
                            logicalname: part.logicalname.clone(),
                        },
                        BackupTarget::Disk {
                            vendor: disk.vendor.clone(),
                            model: disk.model.clone(),
                            partition_info: part,
                        },
                    )
                })
                .collect::<Vec<_>>()
        })
        .chain(
            cifs.into_iter()
                .map(|(id, cifs)| (BackupTargetId::Cifs { id }, BackupTarget::Cifs(cifs))),
        )
        .collect())
}
/// Metadata describing the contents of a backup medium: the OS version that
/// wrote it, when it was last written, and per-package backup details.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct BackupInfo {
    pub version: Version,
    // None if the backup has never been completed.
    pub timestamp: Option<DateTime<Utc>>,
    pub package_backups: BTreeMap<PackageId, PackageBackupInfo>,
}
/// Per-package entry inside a backup: package title/version, the OS version
/// that created the backup, and when it was taken.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct PackageBackupInfo {
    pub title: String,
    pub version: Version,
    pub os_version: Version,
    pub timestamp: DateTime<Utc>,
}
/// CLI pretty-printer for `BackupInfo`: renders one table row for the OS
/// itself followed by one row per package backup. Falls back to the generic
/// serialized output when `--format` was given.
fn display_backup_info(info: BackupInfo, matches: &ArgMatches<'_>) {
    use prettytable::*;
    if matches.is_present("format") {
        return display_serializable(info, matches);
    }
    let mut table = Table::new();
    table.add_row(row![bc =>
        "ID",
        "VERSION",
        "OS VERSION",
        "TIMESTAMP",
    ]);
    table.add_row(row![
        "EMBASSY OS",
        info.version.as_str(),
        // NOTE(review): `info.version` fills both the VERSION and OS VERSION
        // columns here — presumably intentional for the OS's own row, since
        // they are the same value for EmbassyOS itself; confirm.
        info.version.as_str(),
        &if let Some(ts) = &info.timestamp {
            ts.to_string()
        } else {
            "N/A".to_owned()
        },
    ]);
    for (id, info) in info.package_backups {
        let row = row![
            id.as_str(),
            info.version.as_str(),
            info.os_version.as_str(),
            &info.timestamp.to_string(),
        ];
        table.add_row(row);
    }
    table.print_tty(false);
}
/// `backup target info`: mount a backup target (decrypting it with the given
/// password), read out its `BackupInfo` metadata, unmount, and return the
/// metadata for display.
#[command(display(display_backup_info))]
#[instrument(skip(ctx, password))]
pub async fn info(
    #[context] ctx: RpcContext,
    #[arg(rename = "target-id")] target_id: BackupTargetId,
    #[arg] password: String,
) -> Result<BackupInfo, Error> {
    // Resolve the id to a filesystem, temp-mount it, then layer the
    // password-protected backup mount on top.
    let guard = BackupMountGuard::mount(
        TmpMountGuard::mount(
            &target_id
                .load(&mut ctx.secret_store.acquire().await?)
                .await?,
        )
        .await?,
        &password,
    )
    .await?;
    // Clone the metadata out before unmounting so the guard can be released.
    let res = guard.metadata.clone();
    guard.unmount().await?;
    Ok(res)
}

View File

@@ -25,7 +25,6 @@ fn status_fn(_: i32) -> StatusCode {
#[instrument]
async fn setup_or_init(cfg_path: Option<&str>) -> Result<(), Error> {
embassy::disk::util::mount("LABEL=EMBASSY", "/embassy-os").await?;
if tokio::fs::metadata("/embassy-os/disk.guid").await.is_err() {
#[cfg(feature = "avahi")]
let _mdns = MdnsController::init();

View File

@@ -38,6 +38,7 @@ fn err_to_500(e: Error) -> Response<Body> {
#[instrument]
async fn inner_main(cfg_path: Option<&str>) -> Result<Option<Shutdown>, Error> {
let (rpc_ctx, shutdown) = {
embassy::hostname::sync_hostname().await?;
let rpc_ctx = RpcContext::init(
cfg_path,
Arc::new(
@@ -179,16 +180,14 @@ async fn inner_main(cfg_path: Option<&str>) -> Result<Option<Shutdown>, Error> {
None => Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Body::empty()),
Some(cont) => {
match (cont.handler)(req).await {
Ok(r) => Ok(r),
Err(e) => Response::builder()
.status(
StatusCode::INTERNAL_SERVER_ERROR,
)
.body(Body::from(format!("{}", e))),
}
}
Some(cont) => match (cont.handler)(req).await {
Ok(r) => Ok(r),
Err(e) => Response::builder()
.status(
StatusCode::INTERNAL_SERVER_ERROR,
)
.body(Body::from(format!("{}", e))),
},
}
}
}

View File

@@ -96,8 +96,8 @@ impl ConfigActions {
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct SetResult {
#[serde(deserialize_with = "crate::util::deserialize_from_str_opt")]
#[serde(serialize_with = "crate::util::serialize_display_opt")]
#[serde(deserialize_with = "crate::util::serde::deserialize_from_str_opt")]
#[serde(serialize_with = "crate::util::serde::serialize_display_opt")]
pub signal: Option<Signal>,
pub depends_on: BTreeMap<PackageId, BTreeSet<HealthCheckId>>,
}

View File

@@ -23,7 +23,8 @@ use crate::dependencies::{
};
use crate::install::cleanup::remove_current_dependents;
use crate::s9pk::manifest::{Manifest, PackageId};
use crate::util::{display_none, display_serializable, parse_stdin_deserializable, IoFormat};
use crate::util::display_none;
use crate::util::serde::{display_serializable, parse_stdin_deserializable, IoFormat};
use crate::{Error, ResultExt as _};
pub mod action;
@@ -188,7 +189,7 @@ pub fn set(
#[allow(unused_variables)]
#[arg(long = "format")]
format: Option<IoFormat>,
#[arg(long = "timeout")] timeout: Option<crate::util::Duration>,
#[arg(long = "timeout")] timeout: Option<crate::util::serde::Duration>,
#[arg(stdin, parse(parse_stdin_deserializable))] config: Option<Config>,
#[arg(rename = "expire-id", long = "expire-id")] expire_id: Option<String>,
) -> Result<(PackageId, Option<Config>, Option<Duration>, Option<String>), Error> {

View File

@@ -22,7 +22,7 @@ use crate::ResultExt;
pub struct CliContextConfig {
pub bind_rpc: Option<SocketAddr>,
pub host: Option<Url>,
#[serde(deserialize_with = "crate::util::deserialize_from_str_opt")]
#[serde(deserialize_with = "crate::util::serde::deserialize_from_str_opt")]
pub proxy: Option<Url>,
pub cookie_path: Option<PathBuf>,
}

View File

@@ -29,6 +29,7 @@ use crate::net::tor::os_key;
use crate::net::wifi::WpaCli;
use crate::net::NetController;
use crate::notifications::NotificationManager;
use crate::setup::password_hash;
use crate::shutdown::Shutdown;
use crate::status::{MainStatus, Status};
use crate::system::launch_metrics_task;
@@ -81,6 +82,7 @@ impl RpcContextConfig {
get_id().await?,
&get_hostname().await?,
&os_key(&mut secret_store.acquire().await?).await?,
password_hash(&mut secret_store.acquire().await?).await?,
),
None,
)

View File

@@ -20,7 +20,7 @@ use url::Host;
use crate::db::model::Database;
use crate::hostname::{get_hostname, get_id, get_product_key};
use crate::net::tor::os_key;
use crate::setup::RecoveryStatus;
use crate::setup::{password_hash, RecoveryStatus};
use crate::util::io::from_toml_async_reader;
use crate::util::AsyncFileExt;
use crate::{Error, ResultExt};
@@ -62,6 +62,7 @@ pub struct SetupContextSeed {
pub selected_v2_drive: RwLock<Option<PathBuf>>,
pub cached_product_key: RwLock<Option<Arc<String>>>,
pub recovery_status: RwLock<Option<Result<RecoveryStatus, RpcError>>>,
pub disk_guid: RwLock<Option<Arc<String>>>,
}
#[derive(Clone)]
@@ -80,6 +81,7 @@ impl SetupContext {
selected_v2_drive: RwLock::new(None),
cached_product_key: RwLock::new(None),
recovery_status: RwLock::new(None),
disk_guid: RwLock::new(None),
})))
}
#[instrument(skip(self))]
@@ -95,6 +97,7 @@ impl SetupContext {
get_id().await?,
&get_hostname().await?,
&os_key(&mut secret_store.acquire().await?).await?,
password_hash(&mut secret_store.acquire().await?).await?,
),
None,
)

View File

@@ -1,6 +1,5 @@
use std::collections::BTreeMap;
use chrono::Utc;
use color_eyre::eyre::eyre;
use patch_db::DbHandle;
use rpc_toolkit::command;
@@ -14,7 +13,8 @@ use crate::dependencies::{
};
use crate::s9pk::manifest::PackageId;
use crate::status::MainStatus;
use crate::util::{display_none, display_serializable};
use crate::util::display_none;
use crate::util::serde::display_serializable;
use crate::{Error, ResultExt};
#[command(display(display_none))]

View File

@@ -26,7 +26,8 @@ pub use self::model::DatabaseModel;
use self::util::WithRevision;
use crate::context::RpcContext;
use crate::middleware::auth::{HasValidSession, HashSessionToken};
use crate::util::{display_serializable, GeneralGuard, IoFormat};
use crate::util::serde::{display_serializable, IoFormat};
use crate::util::GeneralGuard;
use crate::{Error, ResultExt};
#[instrument(skip(ctx, ws_fut))]

View File

@@ -32,7 +32,12 @@ pub struct Database {
pub ui: Value,
}
impl Database {
pub fn init(id: String, hostname: &str, tor_key: &TorSecretKeyV3) -> Self {
pub fn init(
id: String,
hostname: &str,
tor_key: &TorSecretKeyV3,
password_hash: String,
) -> Self {
// TODO
Database {
server_info: ServerInfo {
@@ -59,6 +64,7 @@ impl Database {
},
share_stats: false,
update_progress: None,
password_hash,
},
package_data: AllPackageData::default(),
recovered_packages: BTreeMap::new(),
@@ -91,6 +97,7 @@ pub struct ServerInfo {
pub share_stats: bool,
#[model]
pub update_progress: Option<UpdateProgress>,
pub password_hash: String,
}
#[derive(Debug, Deserialize, Serialize)]

View File

@@ -21,7 +21,8 @@ use crate::error::ResultExt;
use crate::s9pk::manifest::{Manifest, PackageId};
use crate::status::health_check::{HealthCheckId, HealthCheckResult};
use crate::status::{MainStatus, Status};
use crate::util::{display_none, display_serializable, Version};
use crate::util::serde::display_serializable;
use crate::util::{display_none, Version};
use crate::volume::Volumes;
use crate::Error;

View File

@@ -4,7 +4,8 @@ use std::path::{Path, PathBuf};
use tokio::process::Command;
use tracing::instrument;
use crate::disk::util::{mount, unmount};
use crate::disk::mount::filesystem::block_dev::mount;
use crate::disk::mount::util::unmount;
use crate::util::Invoke;
use crate::{Error, ResultExt};

View File

@@ -1,25 +1,18 @@
use std::collections::BTreeMap;
use std::path::PathBuf;
use chrono::{DateTime, Utc};
use clap::ArgMatches;
use rpc_toolkit::command;
use serde::{Deserialize, Serialize};
use tracing::instrument;
use self::util::DiskInfo;
use crate::disk::util::{BackupMountGuard, TmpMountGuard};
use crate::s9pk::manifest::PackageId;
use crate::util::{display_serializable, IoFormat, Version};
use crate::util::serde::{display_serializable, IoFormat};
use crate::Error;
pub mod main;
pub mod mount;
pub mod quirks;
pub mod util;
pub const BOOT_RW_PATH: &'static str = "/media/boot-rw";
#[command(subcommands(list, backup_info))]
#[command(subcommands(list))]
pub fn disk() -> Result<(), Error> {
Ok(())
}
@@ -86,72 +79,3 @@ pub async fn list(
) -> Result<Vec<DiskInfo>, Error> {
crate::disk::util::list().await
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct BackupInfo {
pub version: Version,
pub timestamp: Option<DateTime<Utc>>,
pub package_backups: BTreeMap<PackageId, PackageBackupInfo>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct PackageBackupInfo {
pub title: String,
pub version: Version,
pub os_version: Version,
pub timestamp: DateTime<Utc>,
}
fn display_backup_info(info: BackupInfo, matches: &ArgMatches<'_>) {
use prettytable::*;
if matches.is_present("format") {
return display_serializable(info, matches);
}
let mut table = Table::new();
table.add_row(row![bc =>
"ID",
"VERSION",
"OS VERSION",
"TIMESTAMP",
]);
table.add_row(row![
"EMBASSY OS",
info.version.as_str(),
info.version.as_str(),
&if let Some(ts) = &info.timestamp {
ts.to_string()
} else {
"N/A".to_owned()
},
]);
for (id, info) in info.package_backups {
let row = row![
id.as_str(),
info.version.as_str(),
info.os_version.as_str(),
&info.timestamp.to_string(),
];
table.add_row(row);
}
table.print_tty(false);
}
#[command(rename = "backup-info", display(display_backup_info))]
#[instrument(skip(password))]
pub async fn backup_info(
#[arg] logicalname: PathBuf,
#[arg] password: String,
) -> Result<BackupInfo, Error> {
let guard =
BackupMountGuard::mount(TmpMountGuard::mount(logicalname, None).await?, &password).await?;
let res = guard.metadata.clone();
guard.unmount().await?;
Ok(res)
}

View File

@@ -0,0 +1,255 @@
use std::path::{Path, PathBuf};
use color_eyre::eyre::eyre;
use tokio::io::AsyncWriteExt;
use tracing::instrument;
use super::filesystem::ecryptfs::EcryptFS;
use super::guard::{GenericMountGuard, TmpMountGuard};
use super::util::{bind, unmount};
use crate::auth::check_password;
use crate::backup::target::BackupInfo;
use crate::disk::util::EmbassyOsRecoveryInfo;
use crate::middleware::encrypt::{decrypt_slice, encrypt_slice};
use crate::s9pk::manifest::PackageId;
use crate::util::serde::IoFormat;
use crate::util::{AtomicFile, FileLock};
use crate::volume::BACKUP_DIR;
use crate::{Error, ResultExt};
/// Guard over a mounted backup target: the (possibly remote) backup disk plus
/// the eCryptfs-decrypted view of its `EmbassyBackups/crypt` directory.
pub struct BackupMountGuard<G: GenericMountGuard> {
    // `Option` so that `unmount`/`Drop` can take ownership of the inner guards.
    backup_disk_mount_guard: Option<G>,
    encrypted_guard: Option<TmpMountGuard>,
    // Base32 passphrase for the eCryptfs layer; stored password-encrypted in
    // `unencrypted_metadata.wrapped_key`.
    enc_key: String,
    // Metadata kept outside the encrypted area (password hash, wrapped key).
    pub unencrypted_metadata: EmbassyOsRecoveryInfo,
    // Backup contents metadata, kept inside the encrypted mount.
    pub metadata: BackupInfo,
}
impl<G: GenericMountGuard> BackupMountGuard<G> {
    // Path of the (still-encrypted) backup disk root. The inner guard is only
    // `None` transiently during unmount/drop, hence the unreachable!().
    fn backup_disk_path(&self) -> &Path {
        if let Some(guard) = &self.backup_disk_mount_guard {
            guard.as_ref()
        } else {
            unreachable!()
        }
    }
    /// Opens (or initializes) the backup store on an already-mounted backup
    /// disk, verifying `password` and mounting the eCryptfs layer.
    ///
    /// First use: mints a random 32-byte key, storing its password-encrypted
    /// form plus an argon2 hash of the password in the unencrypted metadata.
    /// Subsequent uses: checks the password against the stored hash and
    /// unwraps the stored key.
    #[instrument(skip(password))]
    pub async fn mount(backup_disk_mount_guard: G, password: &str) -> Result<Self, Error> {
        let backup_disk_path = backup_disk_mount_guard.as_ref();
        let unencrypted_metadata_path =
            backup_disk_path.join("EmbassyBackups/unencrypted-metadata.cbor");
        // Missing metadata file means a brand-new backup target.
        let mut unencrypted_metadata: EmbassyOsRecoveryInfo =
            if tokio::fs::metadata(&unencrypted_metadata_path)
                .await
                .is_ok()
            {
                IoFormat::Cbor.from_slice(
                    &tokio::fs::read(&unencrypted_metadata_path)
                        .await
                        .with_ctx(|_| {
                            (
                                crate::ErrorKind::Filesystem,
                                unencrypted_metadata_path.display().to_string(),
                            )
                        })?,
                )?
            } else {
                Default::default()
            };
        // Existing store: verify the password, then unwrap the stored key.
        // Fresh store: generate a new random key (base32-encoded).
        let enc_key = if let (Some(hash), Some(wrapped_key)) = (
            unencrypted_metadata.password_hash.as_ref(),
            unencrypted_metadata.wrapped_key.as_ref(),
        ) {
            let wrapped_key =
                base32::decode(base32::Alphabet::RFC4648 { padding: true }, wrapped_key)
                    .ok_or_else(|| {
                        Error::new(
                            eyre!("failed to decode wrapped key"),
                            crate::ErrorKind::Backup,
                        )
                    })?;
            check_password(hash, password)?;
            String::from_utf8(decrypt_slice(wrapped_key, password))?
        } else {
            base32::encode(
                base32::Alphabet::RFC4648 { padding: false },
                &rand::random::<[u8; 32]>()[..],
            )
        };
        if unencrypted_metadata.password_hash.is_none() {
            unencrypted_metadata.password_hash = Some(
                argon2::hash_encoded(
                    password.as_bytes(),
                    &rand::random::<[u8; 16]>()[..],
                    &argon2::Config::default(),
                )
                .with_kind(crate::ErrorKind::PasswordHashGeneration)?,
            );
        }
        if unencrypted_metadata.wrapped_key.is_none() {
            // NOTE(review): encoded with `padding: true` here but re-encoded
            // with `padding: false` in change_password below; confirm both
            // decode correctly under the padded alphabet used when reading.
            unencrypted_metadata.wrapped_key = Some(base32::encode(
                base32::Alphabet::RFC4648 { padding: true },
                &encrypt_slice(&enc_key, password),
            ));
        }
        let crypt_path = backup_disk_path.join("EmbassyBackups/crypt");
        if tokio::fs::metadata(&crypt_path).await.is_err() {
            tokio::fs::create_dir_all(&crypt_path).await.with_ctx(|_| {
                (
                    crate::ErrorKind::Filesystem,
                    crypt_path.display().to_string(),
                )
            })?;
        }
        // Mount the decrypted view of the crypt directory.
        let encrypted_guard = TmpMountGuard::mount(&EcryptFS::new(&crypt_path, &enc_key)).await?;
        let metadata_path = encrypted_guard.as_ref().join("metadata.cbor");
        let metadata: BackupInfo = if tokio::fs::metadata(&metadata_path).await.is_ok() {
            IoFormat::Cbor.from_slice(&tokio::fs::read(&metadata_path).await.with_ctx(|_| {
                (
                    crate::ErrorKind::Filesystem,
                    metadata_path.display().to_string(),
                )
            })?)?
        } else {
            Default::default()
        };
        Ok(Self {
            backup_disk_mount_guard: Some(backup_disk_mount_guard),
            encrypted_guard: Some(encrypted_guard),
            enc_key,
            unencrypted_metadata,
            metadata,
        })
    }
    /// Re-hashes the password and re-wraps the encryption key under
    /// `new_password`. Changes are in-memory only until `save` is called.
    pub fn change_password(&mut self, new_password: &str) -> Result<(), Error> {
        self.unencrypted_metadata.password_hash = Some(
            argon2::hash_encoded(
                new_password.as_bytes(),
                &rand::random::<[u8; 16]>()[..],
                &argon2::Config::default(),
            )
            .with_kind(crate::ErrorKind::PasswordHashGeneration)?,
        );
        self.unencrypted_metadata.wrapped_key = Some(base32::encode(
            base32::Alphabet::RFC4648 { padding: false },
            &encrypt_slice(&self.enc_key, new_password),
        ));
        Ok(())
    }
    /// Bind-mounts the package's subdirectory of the decrypted backup into
    /// `BACKUP_DIR`, holding a per-package file lock for the duration.
    #[instrument(skip(self))]
    pub async fn mount_package_backup(
        &self,
        id: &PackageId,
    ) -> Result<PackageBackupMountGuard, Error> {
        let lock = FileLock::new(Path::new(BACKUP_DIR).join(format!("{}.lock", id)), false).await?;
        let mountpoint = Path::new(BACKUP_DIR).join(id);
        bind(self.as_ref().join(id), &mountpoint, false).await?;
        Ok(PackageBackupMountGuard {
            mountpoint: Some(mountpoint),
            lock: Some(lock),
        })
    }
    /// Atomically persists both metadata files (encrypted and unencrypted).
    #[instrument(skip(self))]
    pub async fn save(&self) -> Result<(), Error> {
        let metadata_path = self.as_ref().join("metadata.cbor");
        let backup_disk_path = self.backup_disk_path();
        let mut file = AtomicFile::new(&metadata_path).await?;
        file.write_all(&IoFormat::Cbor.to_vec(&self.metadata)?)
            .await?;
        file.save().await?;
        let unencrypted_metadata_path =
            backup_disk_path.join("EmbassyBackups/unencrypted-metadata.cbor");
        let mut file = AtomicFile::new(&unencrypted_metadata_path).await?;
        file.write_all(&IoFormat::Cbor.to_vec(&self.unencrypted_metadata)?)
            .await?;
        file.save().await?;
        Ok(())
    }
    /// Unmounts the decrypted layer first, then the backup disk itself.
    #[instrument(skip(self))]
    pub async fn unmount(mut self) -> Result<(), Error> {
        if let Some(guard) = self.encrypted_guard.take() {
            guard.unmount().await?;
        }
        if let Some(guard) = self.backup_disk_mount_guard.take() {
            guard.unmount().await?;
        }
        Ok(())
    }
    /// Convenience: `save` followed by `unmount`.
    #[instrument(skip(self))]
    pub async fn save_and_unmount(self) -> Result<(), Error> {
        self.save().await?;
        self.unmount().await?;
        Ok(())
    }
}
impl<G: GenericMountGuard> AsRef<Path> for BackupMountGuard<G> {
    /// Paths resolve relative to the decrypted (plaintext) view of the backup.
    fn as_ref(&self) -> &Path {
        match &self.encrypted_guard {
            Some(guard) => guard.as_ref(),
            // Only `None` transiently during unmount/drop.
            None => unreachable!(),
        }
    }
}
impl<G: GenericMountGuard> Drop for BackupMountGuard<G> {
    // Fallback cleanup if the guard is dropped without calling unmount():
    // Drop cannot be async, so spawn a task that unmounts the encrypted layer
    // first, then the backup disk. NOTE(review): failures here panic the
    // spawned task via unwrap() rather than being reported anywhere.
    fn drop(&mut self) {
        let first = self.encrypted_guard.take();
        let second = self.backup_disk_mount_guard.take();
        tokio::spawn(async move {
            if let Some(guard) = first {
                guard.unmount().await.unwrap();
            }
            if let Some(guard) = second {
                guard.unmount().await.unwrap();
            }
        });
    }
}
/// Guard over a single package's bind-mounted backup directory under
/// `BACKUP_DIR`, together with the file lock serializing access to it.
pub struct PackageBackupMountGuard {
    // `Option` so that `unmount`/`Drop` can take ownership during cleanup.
    mountpoint: Option<PathBuf>,
    lock: Option<FileLock>,
}
impl PackageBackupMountGuard {
    /// Detaches the package's bind mount and releases its backup lock.
    /// Consuming the guard here prevents the Drop impl from repeating the
    /// cleanup in a background task.
    pub async fn unmount(mut self) -> Result<(), Error> {
        match self.mountpoint.take() {
            Some(path) => unmount(&path).await?,
            None => (),
        }
        match self.lock.take() {
            Some(held) => held.unlock().await?,
            None => (),
        }
        Ok(())
    }
}
impl AsRef<Path> for PackageBackupMountGuard {
    /// The bind-mounted backup directory for this package.
    fn as_ref(&self) -> &Path {
        match &self.mountpoint {
            Some(path) => path,
            // Only `None` transiently during unmount/drop.
            None => unreachable!(),
        }
    }
}
impl Drop for PackageBackupMountGuard {
    // Fallback cleanup if the guard is dropped without calling unmount():
    // Drop cannot be async, so spawn a task to unmount and unlock.
    // NOTE(review): failures here panic the spawned task via unwrap().
    fn drop(&mut self) {
        let mountpoint = self.mountpoint.take();
        let lock = self.lock.take();
        tokio::spawn(async move {
            if let Some(mountpoint) = mountpoint {
                unmount(&mountpoint).await.unwrap();
            }
            if let Some(lock) = lock {
                lock.unlock().await.unwrap();
            }
        });
    }
}

View File

@@ -0,0 +1,59 @@
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use async_trait::async_trait;
use digest::generic_array::GenericArray;
use digest::Digest;
use serde::{Deserialize, Serialize};
use sha2::Sha256;
use super::FileSystem;
use crate::util::Invoke;
use crate::{Error, ResultExt};
/// Mounts the block device `logicalname` at `mountpoint`, creating the
/// mountpoint directory (and any parents) first if needed.
pub async fn mount(
    logicalname: impl AsRef<Path>,
    mountpoint: impl AsRef<Path>,
) -> Result<(), Error> {
    let dev = logicalname.as_ref();
    let target = mountpoint.as_ref();
    // The target directory must exist before `mount` can attach to it.
    tokio::fs::create_dir_all(target).await?;
    let mut cmd = tokio::process::Command::new("mount");
    cmd.arg(dev).arg(target);
    cmd.invoke(crate::ErrorKind::Filesystem).await?;
    Ok(())
}
/// A filesystem source addressed by block-device node (e.g. `/dev/sda1`).
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct BlockDev<LogicalName: AsRef<Path>> {
    logicalname: LogicalName,
}
impl<LogicalName: AsRef<Path>> BlockDev<LogicalName> {
    /// Wraps a block device path; no I/O happens until `mount`.
    pub fn new(logicalname: LogicalName) -> Self {
        BlockDev { logicalname }
    }
}
#[async_trait]
impl<LogicalName: AsRef<Path> + Send + Sync> FileSystem for BlockDev<LogicalName> {
    // Delegates to the free `mount` function above.
    async fn mount<P: AsRef<Path> + Send + Sync>(&self, mountpoint: P) -> Result<(), Error> {
        mount(self.logicalname.as_ref(), mountpoint).await
    }
    // Identity hash: a type tag plus the canonicalized device path, so the
    // same device yields the same hash regardless of symlinks.
    async fn source_hash(&self) -> Result<GenericArray<u8, <Sha256 as Digest>::OutputSize>, Error> {
        let mut sha = Sha256::new();
        sha.update("BlockDev");
        sha.update(
            tokio::fs::canonicalize(self.logicalname.as_ref())
                .await
                .with_ctx(|_| {
                    (
                        crate::ErrorKind::Filesystem,
                        self.logicalname.as_ref().display().to_string(),
                    )
                })?
                .as_os_str()
                .as_bytes(),
        );
        Ok(sha.finalize())
    }
}

View File

@@ -0,0 +1,93 @@
use std::net::IpAddr;
use std::os::unix::ffi::OsStrExt;
use std::path::{Path, PathBuf};
use async_trait::async_trait;
use digest::generic_array::GenericArray;
use digest::Digest;
use serde::{Deserialize, Serialize};
use sha2::Sha256;
use tokio::process::Command;
use tracing::instrument;
use super::FileSystem;
use crate::disk::mount::guard::TmpMountGuard;
use crate::util::Invoke;
use crate::Error;
#[instrument(skip(path, password, mountpoint))]
pub async fn mount_cifs(
hostname: &str,
path: impl AsRef<Path>,
username: &str,
password: Option<&str>,
mountpoint: impl AsRef<Path>,
) -> Result<(), Error> {
tokio::fs::create_dir_all(mountpoint.as_ref()).await?;
let ip: IpAddr = String::from_utf8(
Command::new("nmblookup")
.arg(hostname)
.invoke(crate::ErrorKind::Network)
.await?,
)?
.split(" ")
.next()
.unwrap()
.parse()?;
let absolute_path = Path::new("/").join(path.as_ref());
Command::new("mount")
.arg("-t")
.arg("cifs")
.arg("-o")
.arg(format!(
"username={}{}",
username,
password
.map(|p| format!(",password={}", p))
.unwrap_or_default()
))
.arg(format!("//{}{}", ip, absolute_path.display()))
.arg(mountpoint.as_ref())
.invoke(crate::ErrorKind::Filesystem)
.await?;
Ok(())
}
/// A CIFS/SMB network share, addressed by NetBIOS hostname plus share path.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Cifs {
    pub hostname: String,
    // Path of the share on the server; treated as absolute at mount time
    // (a leading `/` is prepended in `mount_cifs`).
    pub path: PathBuf,
    pub username: String,
    pub password: Option<String>,
}
impl Cifs {
    /// Verifies that the share can actually be mounted by performing a
    /// throwaway mount and unmounting it again immediately.
    pub async fn mountable(&self) -> Result<(), Error> {
        TmpMountGuard::mount(self).await?.unmount().await
    }
}
#[async_trait]
impl FileSystem for Cifs {
    // Mounts the share via `mount_cifs` (NetBIOS lookup + `mount -t cifs`).
    async fn mount<P: AsRef<std::path::Path> + Send + Sync>(
        &self,
        mountpoint: P,
    ) -> Result<(), Error> {
        mount_cifs(
            &self.hostname,
            &self.path,
            &self.username,
            self.password.as_ref().map(|p| p.as_str()),
            mountpoint,
        )
        .await
    }
    // Identity hash over hostname + share path only. NOTE(review): username
    // and password are not part of the hash, so two shares differing only in
    // credentials map to the same temporary mountpoint — confirm intended.
    async fn source_hash(&self) -> Result<GenericArray<u8, <Sha256 as Digest>::OutputSize>, Error> {
        let mut sha = Sha256::new();
        sha.update("Cifs");
        sha.update(self.hostname.as_bytes());
        sha.update(self.path.as_os_str().as_bytes());
        Ok(sha.finalize())
    }
}

View File

@@ -0,0 +1,79 @@
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use async_trait::async_trait;
use color_eyre::eyre::eyre;
use digest::generic_array::GenericArray;
use digest::Digest;
use sha2::Sha256;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use super::FileSystem;
use crate::{Error, ResultExt};
/// Mounts `src` onto `dst` as an eCryptfs overlay keyed by `key`, creating
/// `dst` if needed. Answers the tool's interactive prompts over stdin and
/// surfaces its stderr output in the returned error on failure.
pub async fn mount_ecryptfs<P0: AsRef<Path>, P1: AsRef<Path>>(
    src: P0,
    dst: P1,
    key: &str,
) -> Result<(), Error> {
    tokio::fs::create_dir_all(dst.as_ref()).await?;
    let mut ecryptfs = tokio::process::Command::new("mount")
        .arg("-t")
        .arg("ecryptfs")
        .arg(src.as_ref())
        .arg(dst.as_ref())
        .arg("-o")
        // for more information `man ecryptfs`
        // NOTE(review): `key` is interpolated into the option string, so a key
        // containing `,` would corrupt the options, and the key is visible in
        // the process list while mounting.
        .arg(format!("key=passphrase,passwd={},ecryptfs_cipher=aes,ecryptfs_key_bytes=32,ecryptfs_passthrough=n,ecryptfs_enable_filename_crypto=y", key))
        .stdin(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;
    let mut stdin = ecryptfs.stdin.take().unwrap();
    let mut stderr = ecryptfs.stderr.take().unwrap();
    // Scripted responses to the confirmation prompts, then close stdin so the
    // tool does not block waiting for more input.
    stdin.write_all(b"\nyes\nno").await?;
    stdin.flush().await?;
    stdin.shutdown().await?;
    drop(stdin);
    let mut err = String::new();
    stderr.read_to_string(&mut err).await?;
    if !ecryptfs.wait().await?.success() {
        Err(Error::new(eyre!("{}", err), crate::ErrorKind::Filesystem))
    } else {
        Ok(())
    }
}
/// Pairing of an encrypted backing directory with its eCryptfs passphrase.
pub struct EcryptFS<EncryptedDir: AsRef<Path>, Key: AsRef<str>> {
    encrypted_dir: EncryptedDir,
    key: Key,
}
impl<EncryptedDir: AsRef<Path>, Key: AsRef<str>> EcryptFS<EncryptedDir, Key> {
    /// Bundles the backing directory and key; no I/O happens until `mount`.
    pub fn new(encrypted_dir: EncryptedDir, key: Key) -> Self {
        EcryptFS { encrypted_dir, key }
    }
}
#[async_trait]
impl<EncryptedDir: AsRef<Path> + Send + Sync, Key: AsRef<str> + Send + Sync> FileSystem
    for EcryptFS<EncryptedDir, Key>
{
    // Delegates to the free `mount_ecryptfs` function above.
    async fn mount<P: AsRef<Path> + Send + Sync>(&self, mountpoint: P) -> Result<(), Error> {
        mount_ecryptfs(self.encrypted_dir.as_ref(), mountpoint, self.key.as_ref()).await
    }
    // Identity hash: a type tag plus the canonicalized backing directory.
    // The key is not part of the hash, so the same directory always maps to
    // the same temporary mountpoint.
    async fn source_hash(&self) -> Result<GenericArray<u8, <Sha256 as Digest>::OutputSize>, Error> {
        let mut sha = Sha256::new();
        sha.update("EcryptFS");
        sha.update(
            tokio::fs::canonicalize(self.encrypted_dir.as_ref())
                .await
                .with_ctx(|_| {
                    (
                        crate::ErrorKind::Filesystem,
                        self.encrypted_dir.as_ref().display().to_string(),
                    )
                })?
                .as_os_str()
                .as_bytes(),
        );
        Ok(sha.finalize())
    }
}

View File

@@ -0,0 +1,42 @@
use std::path::Path;
use async_trait::async_trait;
use digest::generic_array::GenericArray;
use digest::Digest;
use sha2::Sha256;
use super::FileSystem;
use crate::util::Invoke;
use crate::Error;
/// Mounts the filesystem carrying filesystem label `label` at `mountpoint`,
/// creating the mountpoint directory first if it does not exist.
pub async fn mount_label(label: &str, mountpoint: impl AsRef<Path>) -> Result<(), Error> {
    let target = mountpoint.as_ref();
    tokio::fs::create_dir_all(target).await?;
    let mut cmd = tokio::process::Command::new("mount");
    // `-L` selects the source device by its filesystem label.
    cmd.arg("-L").arg(label).arg(target);
    cmd.invoke(crate::ErrorKind::Filesystem).await?;
    Ok(())
}
/// A filesystem source addressed by filesystem label (`mount -L`).
pub struct Label<S: AsRef<str>> {
    label: S,
}
impl<S: AsRef<str>> Label<S> {
    /// Wraps the label string; no I/O happens until `mount`.
    pub fn new(label: S) -> Self {
        Label { label }
    }
}
#[async_trait]
impl<S: AsRef<str> + Send + Sync> FileSystem for Label<S> {
    /// Delegates to the free `mount_label` function with the stored label.
    async fn mount<P: AsRef<Path> + Send + Sync>(&self, mountpoint: P) -> Result<(), Error> {
        mount_label(self.label.as_ref(), mountpoint).await
    }
    /// Identity hash: a type tag plus the label text, so distinct labels map
    /// to distinct temporary mountpoints.
    async fn source_hash(&self) -> Result<GenericArray<u8, <Sha256 as Digest>::OutputSize>, Error> {
        let mut hasher = Sha256::new();
        hasher.update("Label");
        hasher.update(self.label.as_ref().as_bytes());
        Ok(hasher.finalize())
    }
}

View File

@@ -0,0 +1,19 @@
use std::path::Path;
use async_trait::async_trait;
use digest::generic_array::GenericArray;
use digest::Digest;
use sha2::Sha256;
use crate::Error;
pub mod block_dev;
pub mod cifs;
pub mod ecryptfs;
pub mod label;
/// A mountable filesystem source (block device, label, CIFS share, eCryptfs).
#[async_trait]
pub trait FileSystem {
    /// Mounts this source at `mountpoint`.
    async fn mount<P: AsRef<Path> + Send + Sync>(&self, mountpoint: P) -> Result<(), Error>;
    /// Stable SHA-256 identity for this source, used by `guard` to derive a
    /// deterministic temporary mountpoint.
    async fn source_hash(&self) -> Result<GenericArray<u8, <Sha256 as Digest>::OutputSize>, Error>;
}

View File

@@ -0,0 +1,114 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Weak};
use lazy_static::lazy_static;
use tokio::sync::Mutex;
use tracing::instrument;
use super::filesystem::FileSystem;
use super::util::unmount;
use crate::Error;
pub const TMP_MOUNTPOINT: &'static str = "/media/embassy-os/tmp";
/// Common interface for mount guards that are consumed to unmount.
#[async_trait::async_trait]
pub trait GenericMountGuard: AsRef<Path> + std::fmt::Debug + Send + Sync + 'static {
    async fn unmount(mut self) -> Result<(), Error>;
}
/// Guard for a single mountpoint; unmounts on `unmount()` or, as a fallback,
/// from a spawned task on drop.
#[derive(Debug)]
pub struct MountGuard {
    mountpoint: PathBuf,
    // Cleared once unmounted so Drop does not unmount a second time.
    mounted: bool,
}
impl MountGuard {
    /// Mounts `filesystem` at `mountpoint` and returns a guard that unmounts
    /// it when consumed by [`MountGuard::unmount`] (or, as a fallback, on
    /// drop).
    pub async fn mount(
        filesystem: &impl FileSystem,
        mountpoint: impl AsRef<Path>,
    ) -> Result<Self, Error> {
        let target = mountpoint.as_ref().to_owned();
        filesystem.mount(&target).await?;
        let guard = MountGuard {
            mountpoint: target,
            mounted: true,
        };
        Ok(guard)
    }
    /// Unmounts now, consuming the guard; a no-op if already unmounted.
    pub async fn unmount(mut self) -> Result<(), Error> {
        if !self.mounted {
            return Ok(());
        }
        unmount(&self.mountpoint).await?;
        self.mounted = false;
        Ok(())
    }
}
impl AsRef<Path> for MountGuard {
    /// Exposes the mountpoint path.
    fn as_ref(&self) -> &Path {
        self.mountpoint.as_path()
    }
}
impl Drop for MountGuard {
    // Fallback cleanup: Drop cannot be async, so unmount in a spawned task.
    // NOTE(review): a failure here panics the spawned task via unwrap().
    fn drop(&mut self) {
        if self.mounted {
            let mountpoint = std::mem::take(&mut self.mountpoint);
            tokio::spawn(async move { unmount(mountpoint).await.unwrap() });
        }
    }
}
#[async_trait::async_trait]
impl GenericMountGuard for MountGuard {
    // Trait shim over the inherent `MountGuard::unmount`.
    async fn unmount(mut self) -> Result<(), Error> {
        MountGuard::unmount(self).await
    }
}
/// Derives a stable temporary mountpoint for `source` under [`TMP_MOUNTPOINT`]
/// by base32-encoding its source hash.
async fn tmp_mountpoint(source: &impl FileSystem) -> Result<PathBuf, Error> {
    let digest = source.source_hash().await?;
    let dir_name = base32::encode(base32::Alphabet::RFC4648 { padding: false }, &digest);
    Ok(Path::new(TMP_MOUNTPOINT).join(dir_name))
}
lazy_static! {
    // Registry of temporary mounts keyed by mountpoint, so concurrent callers
    // mounting the same source share a single MountGuard. Entries hold Weak
    // refs so the registry itself does not keep a mount alive.
    static ref TMP_MOUNTS: Mutex<BTreeMap<PathBuf, Weak<MountGuard>>> = Mutex::new(BTreeMap::new());
}
/// Shared (reference-counted) handle to a temporary mount under TMP_MOUNTPOINT.
#[derive(Debug)]
pub struct TmpMountGuard {
    guard: Arc<MountGuard>,
}
impl TmpMountGuard {
    /// Mounts `filesystem` at a path derived from its `source_hash`, sharing
    /// one underlying [`MountGuard`] between concurrent callers that resolve
    /// to the same mountpoint.
    #[instrument(skip(filesystem))]
    pub async fn mount(filesystem: &impl FileSystem) -> Result<Self, Error> {
        let mountpoint = tmp_mountpoint(filesystem).await?;
        let mut tmp_mounts = TMP_MOUNTS.lock().await;
        // Prune entries whose guards have already been released; the original
        // code left dead Weak entries behind forever, so the registry grew
        // without bound over the life of the process.
        tmp_mounts.retain(|_, weak| weak.strong_count() > 0);
        let weak_slot = tmp_mounts
            .entry(mountpoint.clone())
            .or_insert_with(Weak::new);
        if let Some(guard) = weak_slot.upgrade() {
            // Someone already has this filesystem mounted; share their guard.
            Ok(TmpMountGuard { guard })
        } else {
            let guard = Arc::new(MountGuard::mount(filesystem, &mountpoint).await?);
            *weak_slot = Arc::downgrade(&guard);
            Ok(TmpMountGuard { guard })
        }
    }
    /// Unmounts only when this is the last live handle to the shared guard.
    pub async fn unmount(self) -> Result<(), Error> {
        if let Ok(guard) = Arc::try_unwrap(self.guard) {
            guard.unmount().await?;
        }
        Ok(())
    }
}
impl AsRef<Path> for TmpMountGuard {
    // Delegates to the shared inner MountGuard's mountpoint path.
    fn as_ref(&self) -> &Path {
        (&*self.guard).as_ref()
    }
}
#[async_trait::async_trait]
impl GenericMountGuard for TmpMountGuard {
    // Trait shim over the inherent `TmpMountGuard::unmount`.
    async fn unmount(mut self) -> Result<(), Error> {
        TmpMountGuard::unmount(self).await
    }
}

View File

@@ -0,0 +1,4 @@
pub mod backup;
pub mod filesystem;
pub mod guard;
pub mod util;

View File

@@ -0,0 +1,60 @@
use std::path::Path;
use tracing::instrument;
use crate::util::Invoke;
use crate::{Error, ResultExt};
/// Bind-mounts `src` onto `dst`, optionally read-only, creating both
/// directories if they do not exist. Anything already mounted at `dst` is
/// detached first.
#[instrument(skip(src, dst))]
pub async fn bind<P0: AsRef<Path>, P1: AsRef<Path>>(
    src: P0,
    dst: P1,
    read_only: bool,
) -> Result<(), Error> {
    let (src, dst) = (src.as_ref(), dst.as_ref());
    tracing::info!("Binding {} to {}", src.display(), dst.display());
    // If something is already mounted at the destination, detach it first.
    let already_mounted = tokio::process::Command::new("mountpoint")
        .arg(dst)
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status()
        .await?
        .success();
    if already_mounted {
        unmount(dst).await?;
    }
    // Both endpoints must exist for a bind mount to succeed.
    tokio::fs::create_dir_all(src).await?;
    tokio::fs::create_dir_all(dst).await?;
    let mut cmd = tokio::process::Command::new("mount");
    cmd.arg("--bind");
    if read_only {
        cmd.arg("-o").arg("ro");
    }
    cmd.arg(src).arg(dst);
    cmd.invoke(crate::ErrorKind::Filesystem).await?;
    Ok(())
}
/// Lazily unmounts `mountpoint` and removes the (now detached) mountpoint
/// directory.
#[instrument(skip(mountpoint))]
pub async fn unmount<P: AsRef<Path>>(mountpoint: P) -> Result<(), Error> {
    let target = mountpoint.as_ref();
    tracing::debug!("Unmounting {}.", target.display());
    // `-l` (lazy) detaches immediately even if the mount is still busy.
    tokio::process::Command::new("umount")
        .arg("-l")
        .arg(target)
        .invoke(crate::ErrorKind::Filesystem)
        .await?;
    // Clean up the mountpoint directory left behind after detaching.
    tokio::fs::remove_dir_all(target).await.with_ctx(|_| {
        (
            crate::ErrorKind::Filesystem,
            format!("rm {}", target.display()),
        )
    })?;
    Ok(())
}

View File

@@ -1,13 +1,9 @@
use std::collections::BTreeMap;
use std::os::unix::prelude::OsStrExt;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Weak};
use color_eyre::eyre::{self, eyre};
use digest::Digest;
use futures::TryStreamExt;
use indexmap::IndexSet;
use lazy_static::lazy_static;
use nom::bytes::complete::{tag, take_till1};
use nom::character::complete::multispace1;
use nom::character::is_space;
@@ -17,23 +13,17 @@ use nom::IResult;
use regex::Regex;
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::process::Command;
use tokio::sync::Mutex;
use tracing::instrument;
use super::mount::filesystem::block_dev::BlockDev;
use super::mount::guard::TmpMountGuard;
use super::quirks::{fetch_quirks, save_quirks, update_quirks};
use super::BackupInfo;
use crate::auth::check_password;
use crate::middleware::encrypt::{decrypt_slice, encrypt_slice};
use crate::s9pk::manifest::PackageId;
use crate::util::io::from_yaml_async_reader;
use crate::util::{AtomicFile, FileLock, Invoke, IoFormat, Version};
use crate::volume::BACKUP_DIR;
use crate::util::serde::IoFormat;
use crate::util::{Invoke, Version};
use crate::{Error, ResultExt as _};
pub const TMP_MOUNTPOINT: &'static str = "/media/embassy-os/tmp";
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct DiskInfo {
@@ -205,6 +195,33 @@ pub async fn pvscan() -> Result<BTreeMap<PathBuf, Option<String>>, Error> {
Ok(parse_pvscan_output(pvscan_out_str))
}
/// Reads the unencrypted backup metadata from a mounted backup disk.
///
/// Returns `Ok(None)` when `EmbassyBackups/unencrypted-metadata.cbor` does
/// not exist under `mountpoint`, i.e. the disk holds no Embassy backup.
pub async fn recovery_info(
    mountpoint: impl AsRef<Path>,
) -> Result<Option<EmbassyOsRecoveryInfo>, Error> {
    let backup_unencrypted_metadata_path = mountpoint
        .as_ref()
        .join("EmbassyBackups/unencrypted-metadata.cbor");
    if tokio::fs::metadata(&backup_unencrypted_metadata_path)
        .await
        .is_ok()
    {
        Ok(Some(
            IoFormat::Cbor.from_slice(
                &tokio::fs::read(&backup_unencrypted_metadata_path)
                    .await
                    .with_ctx(|_| {
                        (
                            crate::ErrorKind::Filesystem,
                            backup_unencrypted_metadata_path.display().to_string(),
                        )
                    })?,
            )?,
        ))
    } else {
        Ok(None)
    }
}
#[instrument]
pub async fn list() -> Result<Vec<DiskInfo>, Error> {
let mut quirks = fetch_quirks().await?;
@@ -292,7 +309,7 @@ pub async fn list() -> Result<Vec<DiskInfo>, Error> {
.unwrap_or_default();
let mut used = None;
match TmpMountGuard::mount(&part, None).await {
match TmpMountGuard::mount(&BlockDev::new(&part)).await {
Err(e) => tracing::warn!("Could not collect usage information: {}", e.source),
Ok(mount_guard) => {
used = get_used(&mount_guard)
@@ -305,38 +322,17 @@ pub async fn list() -> Result<Vec<DiskInfo>, Error> {
)
})
.ok();
let backup_unencrypted_metadata_path = mount_guard
.as_ref()
.join("EmbassyBackups/unencrypted-metadata.cbor");
if tokio::fs::metadata(&backup_unencrypted_metadata_path)
.await
.is_ok()
{
embassy_os = match (|| async {
IoFormat::Cbor.from_slice(
&tokio::fs::read(&backup_unencrypted_metadata_path)
.await
.with_ctx(|_| {
(
crate::ErrorKind::Filesystem,
backup_unencrypted_metadata_path
.display()
.to_string(),
)
})?,
)
})()
.await
{
Ok(a) => Some(a),
Err(e) => {
tracing::error!(
"Error fetching unencrypted backup metadata: {}",
e
);
None
}
};
if let Some(recovery_info) = match recovery_info(&mount_guard).await {
Ok(a) => a,
Err(e) => {
tracing::error!(
"Error fetching unencrypted backup metadata: {}",
e
);
None
}
} {
embassy_os = Some(recovery_info)
} else if label.as_deref() == Some("rootfs") {
let version_path = mount_guard.as_ref().join("root/appmgr/version");
if tokio::fs::metadata(&version_path).await.is_ok() {
@@ -377,497 +373,6 @@ pub async fn list() -> Result<Vec<DiskInfo>, Error> {
Ok(res)
}
#[instrument(skip(logicalname, mountpoint))]
pub async fn mount(
logicalname: impl AsRef<Path>,
mountpoint: impl AsRef<Path>,
) -> Result<(), Error> {
let is_mountpoint = tokio::process::Command::new("mountpoint")
.arg(mountpoint.as_ref())
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status()
.await?;
if is_mountpoint.success() {
unmount(mountpoint.as_ref()).await?;
}
tokio::fs::create_dir_all(mountpoint.as_ref()).await?;
let mount_output = tokio::process::Command::new("mount")
.arg(logicalname.as_ref())
.arg(mountpoint.as_ref())
.output()
.await?;
crate::ensure_code!(
mount_output.status.success(),
crate::ErrorKind::Filesystem,
"Error Mounting {} to {}: {}",
logicalname.as_ref().display(),
mountpoint.as_ref().display(),
std::str::from_utf8(&mount_output.stderr).unwrap_or("Unknown Error")
);
Ok(())
}
#[instrument(skip(src, dst, key))]
pub async fn mount_ecryptfs<P0: AsRef<Path>, P1: AsRef<Path>>(
src: P0,
dst: P1,
key: &str,
) -> Result<(), Error> {
let is_mountpoint = tokio::process::Command::new("mountpoint")
.arg(dst.as_ref())
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status()
.await?;
if is_mountpoint.success() {
unmount(dst.as_ref()).await?;
}
tokio::fs::create_dir_all(dst.as_ref()).await?;
let mut ecryptfs = tokio::process::Command::new("mount")
.arg("-t")
.arg("ecryptfs")
.arg(src.as_ref())
.arg(dst.as_ref())
.arg("-o")
.arg(format!("key=passphrase,passwd={},ecryptfs_cipher=aes,ecryptfs_key_bytes=32,ecryptfs_passthrough=n,ecryptfs_enable_filename_crypto=y", key))
.stdin(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped())
.spawn()?;
let mut stdin = ecryptfs.stdin.take().unwrap();
let mut stderr = ecryptfs.stderr.take().unwrap();
stdin.write_all(b"\nyes\nno").await?;
stdin.flush().await?;
stdin.shutdown().await?;
drop(stdin);
let mut err = String::new();
stderr.read_to_string(&mut err).await?;
if !ecryptfs.wait().await?.success() {
Err(Error::new(eyre!("{}", err), crate::ErrorKind::Filesystem))
} else {
Ok(())
}
}
#[instrument(skip(src, dst))]
pub async fn bind<P0: AsRef<Path>, P1: AsRef<Path>>(
src: P0,
dst: P1,
read_only: bool,
) -> Result<(), Error> {
tracing::info!(
"Binding {} to {}",
src.as_ref().display(),
dst.as_ref().display()
);
let is_mountpoint = tokio::process::Command::new("mountpoint")
.arg(dst.as_ref())
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status()
.await?;
if is_mountpoint.success() {
unmount(dst.as_ref()).await?;
}
tokio::fs::create_dir_all(&src).await?;
tokio::fs::create_dir_all(&dst).await?;
let mut mount_cmd = tokio::process::Command::new("mount");
mount_cmd.arg("--bind");
if read_only {
mount_cmd.arg("-o").arg("ro");
}
mount_cmd
.arg(src.as_ref())
.arg(dst.as_ref())
.invoke(crate::ErrorKind::Filesystem)
.await?;
Ok(())
}
#[instrument(skip(mountpoint))]
pub async fn unmount<P: AsRef<Path>>(mountpoint: P) -> Result<(), Error> {
tracing::debug!("Unmounting {}.", mountpoint.as_ref().display());
let umount_output = tokio::process::Command::new("umount")
.arg("-l")
.arg(mountpoint.as_ref())
.output()
.await?;
crate::ensure_code!(
umount_output.status.success(),
crate::ErrorKind::Filesystem,
"Error Unmounting Drive: {}: {}",
mountpoint.as_ref().display(),
std::str::from_utf8(&umount_output.stderr).unwrap_or("Unknown Error")
);
tokio::fs::remove_dir_all(mountpoint.as_ref())
.await
.with_ctx(|_| {
(
crate::ErrorKind::Filesystem,
format!("rm {}", mountpoint.as_ref().display()),
)
})?;
Ok(())
}
#[async_trait::async_trait]
pub trait GenericMountGuard: AsRef<Path> + std::fmt::Debug + Send + Sync + 'static {
async fn unmount(mut self) -> Result<(), Error>;
}
#[derive(Debug)]
pub struct MountGuard {
mountpoint: PathBuf,
mounted: bool,
}
impl MountGuard {
pub async fn mount(
logicalname: impl AsRef<Path>,
mountpoint: impl AsRef<Path>,
encryption_key: Option<&str>,
) -> Result<Self, Error> {
let mountpoint = mountpoint.as_ref().to_owned();
if let Some(key) = encryption_key {
mount_ecryptfs(logicalname, &mountpoint, key).await?;
} else {
mount(logicalname, &mountpoint).await?;
}
Ok(MountGuard {
mountpoint,
mounted: true,
})
}
pub async fn unmount(mut self) -> Result<(), Error> {
if self.mounted {
unmount(&self.mountpoint).await?;
self.mounted = false;
}
Ok(())
}
}
impl AsRef<Path> for MountGuard {
fn as_ref(&self) -> &Path {
&self.mountpoint
}
}
impl Drop for MountGuard {
fn drop(&mut self) {
if self.mounted {
let mountpoint = std::mem::take(&mut self.mountpoint);
tokio::spawn(async move { unmount(mountpoint).await.unwrap() });
}
}
}
#[async_trait::async_trait]
impl GenericMountGuard for MountGuard {
async fn unmount(mut self) -> Result<(), Error> {
MountGuard::unmount(self).await
}
}
async fn tmp_mountpoint(source: impl AsRef<Path>) -> Result<PathBuf, Error> {
Ok(Path::new(TMP_MOUNTPOINT).join(base32::encode(
base32::Alphabet::RFC4648 { padding: false },
&sha2::Sha256::digest(
tokio::fs::canonicalize(&source)
.await
.with_ctx(|_| {
(
crate::ErrorKind::Filesystem,
source.as_ref().display().to_string(),
)
})?
.as_os_str()
.as_bytes(),
),
)))
}
lazy_static! {
static ref TMP_MOUNTS: Mutex<BTreeMap<PathBuf, Weak<MountGuard>>> = Mutex::new(BTreeMap::new());
}
#[derive(Debug)]
pub struct TmpMountGuard {
guard: Arc<MountGuard>,
}
impl TmpMountGuard {
#[instrument(skip(logicalname, encryption_key))]
pub async fn mount(
logicalname: impl AsRef<Path>,
encryption_key: Option<&str>,
) -> Result<Self, Error> {
let mountpoint = tmp_mountpoint(&logicalname).await?;
let mut tmp_mounts = TMP_MOUNTS.lock().await;
if !tmp_mounts.contains_key(&mountpoint) {
tmp_mounts.insert(mountpoint.clone(), Weak::new());
}
let weak_slot = tmp_mounts.get_mut(&mountpoint).unwrap();
if let Some(guard) = weak_slot.upgrade() {
Ok(TmpMountGuard { guard })
} else {
let guard =
Arc::new(MountGuard::mount(logicalname, &mountpoint, encryption_key).await?);
*weak_slot = Arc::downgrade(&guard);
Ok(TmpMountGuard { guard })
}
}
pub async fn unmount(self) -> Result<(), Error> {
if let Ok(guard) = Arc::try_unwrap(self.guard) {
guard.unmount().await?;
}
Ok(())
}
}
impl AsRef<Path> for TmpMountGuard {
fn as_ref(&self) -> &Path {
(&*self.guard).as_ref()
}
}
#[async_trait::async_trait]
impl GenericMountGuard for TmpMountGuard {
async fn unmount(mut self) -> Result<(), Error> {
TmpMountGuard::unmount(self).await
}
}
pub struct BackupMountGuard<G: GenericMountGuard> {
backup_disk_mount_guard: Option<G>,
encrypted_guard: Option<TmpMountGuard>,
enc_key: String,
pub unencrypted_metadata: EmbassyOsRecoveryInfo,
pub metadata: BackupInfo,
}
impl<G: GenericMountGuard> BackupMountGuard<G> {
    /// Root path of the underlying (unencrypted) backup disk mount.
    ///
    /// The guard is only `None` while being consumed by `unmount`/`drop`, so
    /// this is unreachable for a live value.
    fn backup_disk_path(&self) -> &Path {
        if let Some(guard) = &self.backup_disk_mount_guard {
            guard.as_ref()
        } else {
            unreachable!()
        }
    }
    /// Opens (or initializes) the encrypted backup store on an already-mounted
    /// backup target.
    ///
    /// Reads `EmbassyBackups/unencrypted-metadata.cbor` from the target. If it
    /// already holds a password hash and wrapped key, `password` is verified
    /// against the hash and used to unwrap the encryption key; otherwise a
    /// fresh random key is generated and the hash/wrapped key are recorded in
    /// the metadata (persisted on the next `save`). The key is then used to
    /// mount the `EmbassyBackups/crypt` directory, and `metadata.cbor` inside
    /// it is loaded (or defaulted).
    ///
    /// Errors with `ErrorKind::Backup` if the stored wrapped key cannot be
    /// decoded, and with filesystem/password error kinds as appropriate.
    #[instrument(skip(password))]
    pub async fn mount(backup_disk_mount_guard: G, password: &str) -> Result<Self, Error> {
        let backup_disk_path = backup_disk_mount_guard.as_ref();
        let unencrypted_metadata_path =
            backup_disk_path.join("EmbassyBackups/unencrypted-metadata.cbor");
        let mut unencrypted_metadata: EmbassyOsRecoveryInfo =
            if tokio::fs::metadata(&unencrypted_metadata_path)
                .await
                .is_ok()
            {
                IoFormat::Cbor.from_slice(
                    &tokio::fs::read(&unencrypted_metadata_path)
                        .await
                        .with_ctx(|_| {
                            (
                                crate::ErrorKind::Filesystem,
                                unencrypted_metadata_path.display().to_string(),
                            )
                        })?,
                )?
            } else {
                Default::default()
            };
        let enc_key = if let (Some(hash), Some(wrapped_key)) = (
            unencrypted_metadata.password_hash.as_ref(),
            unencrypted_metadata.wrapped_key.as_ref(),
        ) {
            // Existing store: verify the password, then unwrap the stored key.
            let wrapped_key =
                base32::decode(base32::Alphabet::RFC4648 { padding: true }, wrapped_key)
                    .ok_or_else(|| {
                        Error::new(
                            eyre!("failed to decode wrapped key"),
                            crate::ErrorKind::Backup,
                        )
                    })?;
            check_password(hash, password)?;
            String::from_utf8(decrypt_slice(wrapped_key, password))?
        } else {
            // Fresh store: generate a new random encryption key.
            base32::encode(
                base32::Alphabet::RFC4648 { padding: false },
                &rand::random::<[u8; 32]>()[..],
            )
        };
        if unencrypted_metadata.password_hash.is_none() {
            unencrypted_metadata.password_hash = Some(
                argon2::hash_encoded(
                    password.as_bytes(),
                    &rand::random::<[u8; 16]>()[..],
                    &argon2::Config::default(),
                )
                .with_kind(crate::ErrorKind::PasswordHashGeneration)?,
            );
        }
        if unencrypted_metadata.wrapped_key.is_none() {
            unencrypted_metadata.wrapped_key = Some(base32::encode(
                base32::Alphabet::RFC4648 { padding: true },
                &encrypt_slice(&enc_key, password),
            ));
        }
        let crypt_path = backup_disk_path.join("EmbassyBackups/crypt");
        if tokio::fs::metadata(&crypt_path).await.is_err() {
            tokio::fs::create_dir_all(&crypt_path).await.with_ctx(|_| {
                (
                    crate::ErrorKind::Filesystem,
                    crypt_path.display().to_string(),
                )
            })?;
        }
        let encrypted_guard = TmpMountGuard::mount(&crypt_path, Some(&enc_key)).await?;
        let metadata_path = encrypted_guard.as_ref().join("metadata.cbor");
        let metadata: BackupInfo = if tokio::fs::metadata(&metadata_path).await.is_ok() {
            IoFormat::Cbor.from_slice(&tokio::fs::read(&metadata_path).await.with_ctx(|_| {
                (
                    crate::ErrorKind::Filesystem,
                    metadata_path.display().to_string(),
                )
            })?)?
        } else {
            Default::default()
        };
        Ok(Self {
            backup_disk_mount_guard: Some(backup_disk_mount_guard),
            encrypted_guard: Some(encrypted_guard),
            enc_key,
            unencrypted_metadata,
            metadata,
        })
    }
    /// Re-wraps the backup encryption key with `new_password` and replaces the
    /// stored password hash. Changes take effect on disk at the next `save`.
    pub fn change_password(&mut self, new_password: &str) -> Result<(), Error> {
        self.unencrypted_metadata.password_hash = Some(
            argon2::hash_encoded(
                new_password.as_bytes(),
                &rand::random::<[u8; 16]>()[..],
                &argon2::Config::default(),
            )
            .with_kind(crate::ErrorKind::PasswordHashGeneration)?,
        );
        // `padding: true` here: `mount` both writes and decodes the wrapped
        // key with padded RFC 4648 base32, so this must use the same alphabet
        // (was `padding: false`, inconsistent with every other use).
        self.unencrypted_metadata.wrapped_key = Some(base32::encode(
            base32::Alphabet::RFC4648 { padding: true },
            &encrypt_slice(&self.enc_key, new_password),
        ));
        Ok(())
    }
    /// Bind-mounts a single package's backup directory under `BACKUP_DIR`,
    /// holding a per-package file lock for the lifetime of the returned guard.
    #[instrument(skip(self))]
    pub async fn mount_package_backup(
        &self,
        id: &PackageId,
    ) -> Result<PackageBackupMountGuard, Error> {
        let lock = FileLock::new(Path::new(BACKUP_DIR).join(format!("{}.lock", id)), false).await?;
        let mountpoint = Path::new(BACKUP_DIR).join(id);
        bind(self.as_ref().join(id), &mountpoint, false).await?;
        Ok(PackageBackupMountGuard {
            mountpoint: Some(mountpoint),
            lock: Some(lock),
        })
    }
    /// Atomically persists both metadata files: the encrypted `metadata.cbor`
    /// inside the crypt mount and the `unencrypted-metadata.cbor` on the
    /// backup disk.
    #[instrument(skip(self))]
    pub async fn save(&self) -> Result<(), Error> {
        let metadata_path = self.as_ref().join("metadata.cbor");
        let backup_disk_path = self.backup_disk_path();
        let mut file = AtomicFile::new(&metadata_path).await?;
        file.write_all(&IoFormat::Cbor.to_vec(&self.metadata)?)
            .await?;
        file.save().await?;
        let unencrypted_metadata_path =
            backup_disk_path.join("EmbassyBackups/unencrypted-metadata.cbor");
        let mut file = AtomicFile::new(&unencrypted_metadata_path).await?;
        file.write_all(&IoFormat::Cbor.to_vec(&self.unencrypted_metadata)?)
            .await?;
        file.save().await?;
        Ok(())
    }
    /// Unmounts the encrypted layer, then the backup disk beneath it.
    #[instrument(skip(self))]
    pub async fn unmount(mut self) -> Result<(), Error> {
        if let Some(guard) = self.encrypted_guard.take() {
            guard.unmount().await?;
        }
        if let Some(guard) = self.backup_disk_mount_guard.take() {
            guard.unmount().await?;
        }
        Ok(())
    }
    /// Convenience: `save` followed by `unmount`.
    #[instrument(skip(self))]
    pub async fn save_and_unmount(self) -> Result<(), Error> {
        self.save().await?;
        self.unmount().await?;
        Ok(())
    }
}
impl<G: GenericMountGuard> AsRef<Path> for BackupMountGuard<G> {
    /// Root of the decrypted backup filesystem.
    fn as_ref(&self) -> &Path {
        match &self.encrypted_guard {
            Some(guard) => guard.as_ref(),
            // The guard is only `None` mid-teardown, where `self` is consumed.
            None => unreachable!(),
        }
    }
}
impl<G: GenericMountGuard> Drop for BackupMountGuard<G> {
    fn drop(&mut self) {
        // `Drop` cannot await, so move ownership of both guards into a task.
        let encrypted = self.encrypted_guard.take();
        let backup_disk = self.backup_disk_mount_guard.take();
        tokio::spawn(async move {
            // Inner (encrypted) layer comes down before the disk it lives on.
            if let Some(g) = encrypted {
                g.unmount().await.unwrap();
            }
            if let Some(g) = backup_disk {
                g.unmount().await.unwrap();
            }
        });
    }
}
/// Guard over a single package's backup directory, bind-mounted under
/// `BACKUP_DIR` while a per-package lock file is held.
pub struct PackageBackupMountGuard {
    // Bind-mount location; `None` once unmounted (by `unmount` or `drop`).
    mountpoint: Option<PathBuf>,
    // Per-package lock file; `None` once released.
    lock: Option<FileLock>,
}
impl PackageBackupMountGuard {
    /// Unmounts the package bind-mount, then releases its file lock.
    ///
    /// If unmounting fails, the still-held lock is cleaned up by `Drop`.
    pub async fn unmount(mut self) -> Result<(), Error> {
        match self.mountpoint.take() {
            Some(mountpoint) => unmount(&mountpoint).await?,
            None => {}
        }
        match self.lock.take() {
            Some(lock) => lock.unlock().await?,
            None => {}
        }
        Ok(())
    }
}
impl AsRef<Path> for PackageBackupMountGuard {
    /// Path where the package backup is bind-mounted.
    fn as_ref(&self) -> &Path {
        match &self.mountpoint {
            Some(path) => path,
            // Only `None` after `unmount` has consumed `self`.
            None => unreachable!(),
        }
    }
}
impl Drop for PackageBackupMountGuard {
    fn drop(&mut self) {
        // Teardown is async (unmount + unlock), so hand it off to a task.
        let mountpoint = self.mountpoint.take();
        let lock = self.lock.take();
        tokio::spawn(async move {
            if let Some(path) = mountpoint {
                unmount(&path).await.unwrap();
            }
            if let Some(l) = lock {
                l.unlock().await.unwrap();
            }
        });
    }
}
fn parse_pvscan_output(pvscan_output: &str) -> BTreeMap<PathBuf, Option<String>> {
fn parse_line(line: &str) -> IResult<&str, (&str, Option<&str>)> {
let pv_parse = preceded(

View File

@@ -61,6 +61,7 @@ pub enum ErrorKind {
Duplicate = 53,
MultipleErrors = 54,
Incoherent = 55,
InvalidBackupTargetId = 56,
}
impl ErrorKind {
pub fn as_str(&self) -> &'static str {
@@ -121,6 +122,7 @@ impl ErrorKind {
Duplicate => "Duplication Error",
MultipleErrors => "Multiple Errors",
Incoherent => "Incoherent",
InvalidBackupTargetId => "Invalid Backup Target ID",
}
}
}
@@ -336,7 +338,7 @@ where
macro_rules! ensure_code {
($x:expr, $c:expr, $fmt:expr $(, $arg:expr)*) => {
if !($x) {
return Err(crate::Error::new(eyre!($fmt, $($arg, )*), $c));
return Err(crate::Error::new(color_eyre::eyre::eyre!($fmt, $($arg, )*), $c));
}
};
}

View File

@@ -12,7 +12,7 @@ pub async fn init(cfg: &RpcContextConfig) -> Result<(), Error> {
if tokio::fs::metadata(&log_dir).await.is_err() {
tokio::fs::create_dir_all(&log_dir).await?;
}
crate::disk::util::bind(&log_dir, "/var/log/journal", false).await?;
crate::disk::mount::util::bind(&log_dir, "/var/log/journal", false).await?;
Command::new("systemctl")
.arg("restart")
.arg("systemd-journald")
@@ -38,7 +38,7 @@ pub async fn init(cfg: &RpcContextConfig) -> Result<(), Error> {
.arg("docker")
.invoke(crate::ErrorKind::Docker)
.await?;
crate::disk::util::bind(&tmp_docker, "/var/lib/docker", false).await?;
crate::disk::mount::util::bind(&tmp_docker, "/var/lib/docker", false).await?;
Command::new("systemctl")
.arg("reset-failed")
.arg("docker")
@@ -60,8 +60,6 @@ pub async fn init(cfg: &RpcContextConfig) -> Result<(), Error> {
crate::ssh::sync_keys_from_db(&secret_store, "/root/.ssh/authorized_keys").await?;
tracing::info!("Synced SSH Keys");
crate::hostname::sync_hostname().await?;
tracing::info!("Synced Hostname");
crate::net::wifi::synchronize_wpa_supplicant_conf(&cfg.datadir().join("main")).await?;
tracing::info!("Synchronized wpa_supplicant.conf");

View File

@@ -4,7 +4,8 @@ use rpc_toolkit::command;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::reader::S9pkReader;
use crate::util::{display_none, display_serializable, IoFormat};
use crate::util::display_none;
use crate::util::serde::{display_serializable, IoFormat};
use crate::Error;
#[command(subcommands(hash, manifest, license, icon, instructions, docker_images))]

View File

@@ -42,7 +42,8 @@ use crate::s9pk::manifest::{Manifest, PackageId};
use crate::s9pk::reader::S9pkReader;
use crate::status::{MainStatus, Status};
use crate::util::io::copy_and_shutdown;
use crate::util::{display_none, display_serializable, AsyncFileExt, IoFormat, Version};
use crate::util::serde::{display_serializable, IoFormat};
use crate::util::{display_none, AsyncFileExt, Version};
use crate::version::{Current, VersionT};
use crate::volume::asset_dir;
use crate::{Error, ErrorKind, ResultExt};

View File

@@ -6,7 +6,8 @@ use rpc_toolkit::command;
use crate::context::RpcContext;
use crate::dependencies::{break_transitive, BreakageRes, DependencyError};
use crate::s9pk::manifest::PackageId;
use crate::util::{display_serializable, Version};
use crate::util::serde::display_serializable;
use crate::util::Version;
use crate::Error;
#[command(subcommands(dry))]

View File

@@ -15,7 +15,7 @@ use tracing::instrument;
use crate::action::docker::DockerAction;
use crate::error::ResultExt;
use crate::s9pk::manifest::PackageId;
use crate::util::Reversible;
use crate::util::serde::Reversible;
use crate::Error;
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]

View File

@@ -13,7 +13,7 @@ use tracing::instrument;
use crate::db::model::{InterfaceAddressMap, InterfaceAddresses};
use crate::id::Id;
use crate::s9pk::manifest::PackageId;
use crate::util::Port;
use crate::util::serde::Port;
use crate::Error;
#[derive(Clone, Debug, Deserialize, Serialize)]

View File

@@ -1,28 +0,0 @@
map $http_x_forwarded_proto $real_proto {{
ext+onions ext+onions;
ext+onion ext+onion;
https https;
http http;
default $scheme;
}}
server {{
listen 443 ssl;
server_name .{hostname}.local;
ssl_certificate /root/appmgr/apps/{app_id}/cert-local.fullchain.crt.pem;
ssl_certificate_key /root/appmgr/apps/{app_id}/cert-local.key.pem;
location / {{
proxy_pass http://{app_ip}:{internal_port}/;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $real_proto;
client_max_body_size 0;
proxy_request_buffering off;
proxy_buffering off;
}}
}}
server {{
listen 80;
server_name .{hostname}.local;
return 301 https://$host$request_uri;
}}

View File

@@ -11,7 +11,8 @@ use super::interface::{InterfaceId, LanPortConfig};
use super::ssl::SslManager;
use crate::hostname::get_hostname;
use crate::s9pk::manifest::PackageId;
use crate::util::{Invoke, Port};
use crate::util::serde::Port;
use crate::util::Invoke;
use crate::{Error, ErrorKind, ResultExt};
pub struct NginxController {

View File

@@ -19,7 +19,7 @@ use tracing::instrument;
use super::interface::{InterfaceId, TorConfig};
use crate::context::RpcContext;
use crate::s9pk::manifest::PackageId;
use crate::util::{display_serializable, IoFormat};
use crate::util::serde::{display_serializable, IoFormat};
use crate::{Error, ErrorKind, ResultExt as _};
#[test]

View File

@@ -11,7 +11,8 @@ use tokio::sync::RwLock;
use tracing::instrument;
use crate::context::RpcContext;
use crate::util::{display_none, display_serializable, Invoke, IoFormat};
use crate::util::serde::{display_serializable, IoFormat};
use crate::util::{display_none, Invoke};
use crate::{Error, ErrorKind};
#[command(subcommands(add, connect, delete, get, set_country))]

View File

@@ -14,7 +14,8 @@ use crate::backup::BackupReport;
use crate::context::RpcContext;
use crate::db::util::WithRevision;
use crate::s9pk::manifest::PackageId;
use crate::util::{display_none, display_serializable};
use crate::util::display_none;
use crate::util::serde::display_serializable;
use crate::{Error, ErrorKind, ResultExt};
#[command(subcommands(list, delete, delete_before, create))]

View File

@@ -11,17 +11,23 @@ use openssl::x509::X509;
use rpc_toolkit::command;
use rpc_toolkit::yajrc::RpcError;
use serde::{Deserialize, Serialize};
use sqlx::{Executor, Sqlite};
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use torut::onion::{OnionAddressV3, TorSecretKeyV3};
use tracing::instrument;
use crate::backup::restore::recover_full_embassy;
use crate::backup::target::BackupTargetFS;
use crate::context::rpc::RpcContextConfig;
use crate::context::SetupContext;
use crate::db::model::RecoveredPackageInfo;
use crate::disk::main::DEFAULT_PASSWORD;
use crate::disk::util::{pvscan, DiskInfo, PartitionInfo, TmpMountGuard};
use crate::disk::mount::filesystem::block_dev::BlockDev;
use crate::disk::mount::filesystem::cifs::Cifs;
use crate::disk::mount::guard::TmpMountGuard;
use crate::disk::util::{pvscan, recovery_info, DiskInfo, EmbassyOsRecoveryInfo};
use crate::hostname::PRODUCT_KEY_PATH;
use crate::id::Id;
use crate::init::init;
use crate::install::PKG_PUBLIC_DIR;
@@ -33,7 +39,20 @@ use crate::util::Version;
use crate::volume::{data_dir, VolumeId};
use crate::{ensure_code, Error, ResultExt};
#[command(subcommands(status, disk, execute, recovery))]
#[instrument(skip(secrets))]
pub async fn password_hash<Ex>(secrets: &mut Ex) -> Result<String, Error>
where
for<'a> &'a mut Ex: Executor<'a, Database = Sqlite>,
{
let password = sqlx::query!("SELECT password FROM account")
.fetch_one(secrets)
.await?
.password;
Ok(password)
}
#[command(subcommands(status, disk, execute, recovery, cifs, complete))]
pub fn setup() -> Result<(), Error> {
Ok(())
}
@@ -51,7 +70,7 @@ pub async fn status(#[context] ctx: SetupContext) -> Result<StatusRes, Error> {
product_key: tokio::fs::metadata("/embassy-os/product_key.txt")
.await
.is_ok(),
migrating: ctx.recovery_status.read().await.is_some(), // TODO
migrating: ctx.recovery_status.read().await.is_some(),
})
}
@@ -65,16 +84,35 @@ pub async fn list_disks() -> Result<Vec<DiskInfo>, Error> {
crate::disk::list(None).await
}
#[command(subcommands(recovery_status))]
#[command(subcommands(v2, recovery_status))]
pub fn recovery() -> Result<(), Error> {
Ok(())
}
#[command(subcommands(set))]
pub fn v2() -> Result<(), Error> {
Ok(())
}
#[command(rpc_only, metadata(authenticated = false))]
pub async fn set(#[context] ctx: SetupContext, #[arg] logicalname: PathBuf) -> Result<(), Error> {
let guard = TmpMountGuard::mount(&BlockDev::new(&logicalname)).await?;
let product_key = tokio::fs::read_to_string(guard.as_ref().join("root/agent/product_key"))
.await?
.trim()
.to_owned();
guard.unmount().await?;
*ctx.cached_product_key.write().await = Some(Arc::new(product_key));
*ctx.selected_v2_drive.write().await = Some(logicalname);
Ok(())
}
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct RecoveryStatus {
pub bytes_transferred: u64,
pub total_bytes: u64,
pub complete: bool,
}
#[command(rename = "status", rpc_only, metadata(authenticated = false))]
@@ -84,6 +122,30 @@ pub async fn recovery_status(
ctx.recovery_status.read().await.clone().transpose()
}
#[command(subcommands(verify_cifs))]
pub fn cifs() -> Result<(), Error> {
Ok(())
}
#[command(rename = "verify", rpc_only)]
pub async fn verify_cifs(
#[arg] hostname: String,
#[arg] path: PathBuf,
#[arg] username: String,
#[arg] password: Option<String>,
) -> Result<EmbassyOsRecoveryInfo, Error> {
let guard = TmpMountGuard::mount(&Cifs {
hostname,
path,
username,
password,
})
.await?;
let embassy_os = recovery_info(&guard).await?;
guard.unmount().await?;
embassy_os.ok_or_else(|| Error::new(eyre!("No Backup Found"), crate::ErrorKind::NotFound))
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct SetupResult {
@@ -97,14 +159,17 @@ pub async fn execute(
#[context] ctx: SetupContext,
#[arg(rename = "embassy-logicalname")] embassy_logicalname: PathBuf,
#[arg(rename = "embassy-password")] embassy_password: String,
#[arg(rename = "recovery-partition")] recovery_partition: Option<PartitionInfo>,
#[arg(rename = "recovery-source")] mut recovery_source: Option<BackupTargetFS>,
#[arg(rename = "recovery-password")] recovery_password: Option<String>,
) -> Result<SetupResult, Error> {
if let Some(v2_drive) = &*ctx.selected_v2_drive.read().await {
recovery_source = Some(BackupTargetFS::Disk(BlockDev::new(v2_drive.clone())))
}
match execute_inner(
ctx,
embassy_logicalname,
embassy_password,
recovery_partition,
recovery_source,
recovery_password,
)
.await
@@ -126,7 +191,23 @@ pub async fn execute(
}
#[instrument(skip(ctx))]
pub async fn complete_setup(ctx: SetupContext, guid: Arc<String>) -> Result<(), Error> {
#[command(rpc_only)]
pub async fn complete(#[context] ctx: SetupContext) -> Result<(), Error> {
let guid = if let Some(guid) = &*ctx.disk_guid.read().await {
guid.clone()
} else {
return Err(Error::new(
eyre!("setup.execute has not completed successfully"),
crate::ErrorKind::InvalidRequest,
));
};
if tokio::fs::metadata(PRODUCT_KEY_PATH).await.is_err() {
let mut pkey_file = File::create(PRODUCT_KEY_PATH).await?;
pkey_file
.write_all(ctx.product_key().await?.as_bytes())
.await?;
pkey_file.sync_all().await?;
}
let mut guid_file = File::create("/embassy-os/disk.guid").await?;
guid_file.write_all(guid.as_bytes()).await?;
guid_file.sync_all().await?;
@@ -139,7 +220,7 @@ pub async fn execute_inner(
ctx: SetupContext,
embassy_logicalname: PathBuf,
embassy_password: String,
recovery_partition: Option<PartitionInfo>,
recovery_source: Option<BackupTargetFS>,
recovery_password: Option<String>,
) -> Result<(OnionAddressV3, X509), Error> {
if ctx.recovery_status.read().await.is_some() {
@@ -159,27 +240,25 @@ pub async fn execute_inner(
);
crate::disk::main::import(&*guid, &ctx.datadir, DEFAULT_PASSWORD).await?;
let res = if let Some(recovery_partition) = recovery_partition {
if recovery_partition
.embassy_os
.as_ref()
.map(|v| &*v.version < &emver::Version::new(0, 2, 8, 0))
.unwrap_or(true)
{
return Err(Error::new(eyre!("Unsupported version of EmbassyOS. Please update to at least 0.2.8 before recovering."), crate::ErrorKind::VersionIncompatible));
}
let res = if let Some(recovery_source) = recovery_source {
let (tor_addr, root_ca, recover_fut) = recover(
ctx.clone(),
guid.clone(),
embassy_password,
recovery_partition,
recovery_source,
recovery_password,
)
.await?;
init(&RpcContextConfig::load(ctx.config_path.as_ref()).await?).await?;
tokio::spawn(async move {
if let Err(e) = recover_fut
.and_then(|_| complete_setup(ctx.clone(), guid))
.and_then(|_| async {
*ctx.disk_guid.write().await = Some(guid);
if let Some(Ok(recovery_status)) = &mut *ctx.recovery_status.write().await {
recovery_status.complete = true;
}
Ok(())
})
.await
{
BEETHOVEN.play().await.unwrap_or_default(); // ignore error in playing the song
@@ -192,7 +271,7 @@ pub async fn execute_inner(
} else {
let res = fresh_setup(&ctx, &embassy_password).await?;
init(&RpcContextConfig::load(ctx.config_path.as_ref()).await?).await?;
complete_setup(ctx, guid).await?;
*ctx.disk_guid.write().await = Some(guid);
res
};
@@ -233,11 +312,12 @@ async fn recover(
ctx: SetupContext,
guid: Arc<String>,
embassy_password: String,
recovery_partition: PartitionInfo,
recovery_source: BackupTargetFS,
recovery_password: Option<String>,
) -> Result<(OnionAddressV3, X509, BoxFuture<'static, Result<(), Error>>), Error> {
let recovery_version = recovery_partition
.embassy_os
let recovery_source = TmpMountGuard::mount(&recovery_source).await?;
let recovery_version = recovery_info(&recovery_source)
.await?
.as_ref()
.map(|i| i.version.clone())
.unwrap_or_default();
@@ -246,14 +326,14 @@ async fn recover(
(
tor_addr,
root_ca,
recover_v2(ctx.clone(), recovery_partition).boxed(),
recover_v2(ctx.clone(), recovery_source).boxed(),
)
} else if recovery_version.major() == 0 && recovery_version.minor() == 3 {
recover_full_embassy(
ctx.clone(),
guid.clone(),
embassy_password,
recovery_partition,
recovery_source,
recovery_password,
)
.await?
@@ -332,14 +412,12 @@ fn dir_copy<'a, P0: AsRef<Path> + 'a + Send + Sync, P1: AsRef<Path> + 'a + Send
}
#[instrument(skip(ctx))]
async fn recover_v2(ctx: SetupContext, recovery_partition: PartitionInfo) -> Result<(), Error> {
let recovery = TmpMountGuard::mount(&recovery_partition.logicalname, None).await?;
async fn recover_v2(ctx: SetupContext, recovery_source: TmpMountGuard) -> Result<(), Error> {
let secret_store = ctx.secret_store().await?;
let db = ctx.db(&secret_store).await?;
let mut handle = db.handle();
let apps_yaml_path = recovery
let apps_yaml_path = recovery_source
.as_ref()
.join("root")
.join("appmgr")
@@ -358,7 +436,7 @@ async fn recover_v2(ctx: SetupContext, recovery_partition: PartitionInfo) -> Res
})?)
.await?;
let volume_path = recovery.as_ref().join("root/volumes");
let volume_path = recovery_source.as_ref().join("root/volumes");
let mut total_bytes = 0;
for (pkg_id, _) in &packages {
let volume_src_path = volume_path.join(&pkg_id);
@@ -372,6 +450,7 @@ async fn recover_v2(ctx: SetupContext, recovery_partition: PartitionInfo) -> Res
*ctx.recovery_status.write().await = Some(Ok(RecoveryStatus {
bytes_transferred: 0,
total_bytes,
complete: false,
}));
let bytes_transferred = AtomicU64::new(0);
let volume_id = VolumeId::Custom(Id::try_from("main".to_owned())?);
@@ -398,11 +477,12 @@ async fn recover_v2(ctx: SetupContext, recovery_partition: PartitionInfo) -> Res
*ctx.recovery_status.write().await = Some(Ok(RecoveryStatus {
bytes_transferred: bytes_transferred.load(Ordering::Relaxed),
total_bytes,
complete: false
}));
}
} => (),
);
let tor_src_path = recovery
let tor_src_path = recovery_source
.as_ref()
.join("var/lib/tor")
.join(format!("app-{}", pkg_id))
@@ -430,7 +510,7 @@ async fn recover_v2(ctx: SetupContext, recovery_partition: PartitionInfo) -> Res
let icon_leaf = AsRef::<Path>::as_ref(&pkg_id)
.join(info.version.as_str())
.join("icon.png");
let icon_src_path = recovery
let icon_src_path = recovery_source
.as_ref()
.join("root/agent/icons")
.join(format!("{}.png", pkg_id));
@@ -468,6 +548,6 @@ async fn recover_v2(ctx: SetupContext, recovery_partition: PartitionInfo) -> Res
}
secret_store.close().await;
recovery.unmount().await?;
recovery_source.unmount().await?;
Ok(())
}

View File

@@ -8,15 +8,16 @@ use sqlx::{Pool, Sqlite};
use tracing::instrument;
use crate::context::RpcContext;
use crate::util::{display_none, display_serializable, IoFormat};
use crate::util::display_none;
use crate::util::serde::{display_serializable, IoFormat};
use crate::{Error, ErrorKind};
static SSH_AUTHORIZED_KEYS_FILE: &str = "/root/.ssh/authorized_keys";
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct PubKey(
#[serde(serialize_with = "crate::util::serialize_display")]
#[serde(deserialize_with = "crate::util::deserialize_from_str")]
#[serde(serialize_with = "crate::util::serde::serialize_display")]
#[serde(deserialize_with = "crate::util::serde::deserialize_from_str")]
openssh_keys::PublicKey,
);

View File

@@ -9,7 +9,8 @@ use crate::action::{ActionImplementation, NoOutput};
use crate::context::RpcContext;
use crate::id::Id;
use crate::s9pk::manifest::PackageId;
use crate::util::{Duration, Version};
use crate::util::serde::Duration;
use crate::util::Version;
use crate::volume::Volumes;
use crate::Error;

View File

@@ -12,7 +12,8 @@ use crate::db::util::WithRevision;
use crate::disk::util::{get_available, get_percentage, get_used};
use crate::logs::{display_logs, fetch_logs, LogResponse, LogSource};
use crate::shutdown::Shutdown;
use crate::util::{display_none, display_serializable, IoFormat};
use crate::util::display_none;
use crate::util::serde::{display_serializable, IoFormat};
use crate::{Error, ErrorKind};
pub const SYSTEMD_UNIT: &'static str = "embassyd";

View File

@@ -25,12 +25,18 @@ use tracing::instrument;
use crate::context::RpcContext;
use crate::db::model::{ServerStatus, UpdateProgress};
use crate::db::util::WithRevision;
use crate::disk::mount::filesystem::label::Label;
use crate::disk::mount::filesystem::FileSystem;
use crate::disk::mount::guard::TmpMountGuard;
use crate::disk::BOOT_RW_PATH;
use crate::notifications::NotificationLevel;
use crate::update::latest_information::LatestInformation;
use crate::util::Invoke;
use crate::version::{Current, VersionT};
use crate::{Error, ErrorKind, ResultExt};
mod latest_information;
lazy_static! {
static ref UPDATED: AtomicBool = AtomicBool::new(false);
}
@@ -78,89 +84,24 @@ fn display_update_result(status: WithRevision<UpdateResult>, _: &ArgMatches<'_>)
}
const HEADER_KEY: &str = "x-eos-hash";
mod latest_information;
#[derive(Debug, Clone, Copy)]
enum WritableDrives {
Green,
Blue,
}
#[derive(Debug, Clone, Copy)]
struct Boot;
/// We are going to be creating some folders and mounting so
/// we need to know the labels for those types. These labels
/// are the labels that are shipping with the embassy, blue/ green
/// are where the os sits and will do a swap during update.
trait FileType: std::fmt::Debug + Copy + Send + Sync + 'static {
fn mount_folder(&self) -> PathBuf {
Path::new("/media").join(self.label())
}
fn label(&self) -> &'static str;
fn block_dev(&self) -> &'static Path;
}
impl FileType for WritableDrives {
impl WritableDrives {
fn label(&self) -> &'static str {
match self {
WritableDrives::Green => "green",
WritableDrives::Blue => "blue",
Self::Green => "green",
Self::Blue => "blue",
}
}
fn block_dev(&self) -> &'static Path {
Path::new(match self {
WritableDrives::Green => "/dev/mmcblk0p3",
WritableDrives::Blue => "/dev/mmcblk0p4",
})
fn block_dev(&self) -> PathBuf {
Path::new("/dev/disk/by-label").join(self.label())
}
}
impl FileType for Boot {
fn label(&self) -> &'static str {
"system-boot"
}
fn block_dev(&self) -> &'static Path {
Path::new("/dev/mmcblk0p1")
}
}
/// Proven data that this is mounted, should be consumed in an unmount
#[derive(Debug)]
struct MountedResource<X: FileType> {
value: X,
mounted: bool,
}
impl<X: FileType> MountedResource<X> {
fn new(value: X) -> Self {
MountedResource {
value,
mounted: true,
}
}
#[instrument]
async fn unmount(value: X) -> Result<(), Error> {
let folder = value.mount_folder();
Command::new("umount")
.arg(&folder)
.invoke(crate::ErrorKind::Filesystem)
.await?;
tokio::fs::remove_dir_all(&folder)
.await
.with_ctx(|_| (crate::ErrorKind::Filesystem, folder.display().to_string()))?;
Ok(())
}
#[instrument]
async fn unmount_label(&mut self) -> Result<(), Error> {
Self::unmount(self.value).await?;
self.mounted = false;
Ok(())
}
}
impl<X: FileType> Drop for MountedResource<X> {
fn drop(&mut self) {
if self.mounted {
let value = self.value;
tokio::spawn(async move { Self::unmount(value).await.expect("failed to unmount") });
}
fn as_fs(&self) -> impl FileSystem {
Label::new(self.label())
}
}
@@ -222,7 +163,6 @@ async fn maybe_do_update(ctx: RpcContext) -> Result<Option<Arc<Revision>>, Error
ServerStatus::Running => (),
}
let mounted_boot = mount_label(Boot).await?;
let (new_label, _current_label) = query_mounted_label().await?;
let (size, download) = download_file(
ctx.db.handle(),
@@ -243,7 +183,7 @@ async fn maybe_do_update(ctx: RpcContext) -> Result<Option<Arc<Revision>>, Error
tokio::spawn(async move {
let mut db = ctx.db.handle();
let res = do_update(download, new_label, mounted_boot).await;
let res = do_update(download, new_label).await;
let mut info = crate::db::DatabaseModel::new()
.server_info()
.get_mut(&mut db)
@@ -280,12 +220,9 @@ async fn maybe_do_update(ctx: RpcContext) -> Result<Option<Arc<Revision>>, Error
async fn do_update(
download: impl Future<Output = Result<(), Error>>,
new_label: NewLabel,
mut mounted_boot: MountedResource<Boot>,
) -> Result<(), Error> {
download.await?;
swap_boot_label(new_label, &mounted_boot).await?;
mounted_boot.unmount_label().await?;
swap_boot_label(new_label).await?;
Ok(())
}
@@ -354,13 +291,13 @@ async fn download_file<'a, Db: DbHandle + 'a>(
.map(|l| l.parse())
.transpose()?;
Ok((size, async move {
let hash_from_header: String = "".to_owned(); // download_request
// .headers()
// .get(HEADER_KEY)
// .ok_or_else(|| Error::new(eyre!("No {} in headers", HEADER_KEY), ErrorKind::Network))?
// .to_str()
// .with_kind(ErrorKind::InvalidRequest)?
// .to_owned();
let hash_from_header: String = download_request
.headers()
.get(HEADER_KEY)
.ok_or_else(|| Error::new(eyre!("No {} in headers", HEADER_KEY), ErrorKind::Network))?
.to_str()
.with_kind(ErrorKind::InvalidRequest)?
.to_owned();
let stream_download = download_request.bytes_stream();
let file_sum = write_stream_to_label(&mut db, size, stream_download, new_label).await?;
check_download(&hash_from_header, file_sum).await?;
@@ -408,39 +345,36 @@ async fn write_stream_to_label<Db: DbHandle>(
#[instrument]
async fn check_download(hash_from_header: &str, file_digest: Vec<u8>) -> Result<(), Error> {
// if hex::decode(hash_from_header).with_kind(ErrorKind::Network)? != file_digest {
// return Err(Error::new(
// eyre!("Hash sum does not match source"),
// ErrorKind::Network,
// ));
// }
if hex::decode(hash_from_header).with_kind(ErrorKind::Network)? != file_digest {
return Err(Error::new(
eyre!("Hash sum does not match source"),
ErrorKind::Network,
));
}
Ok(())
}
#[instrument]
async fn swap_boot_label(
new_label: NewLabel,
mounted_boot: &MountedResource<Boot>,
) -> Result<(), Error> {
async fn swap_boot_label(new_label: NewLabel) -> Result<(), Error> {
let block_dev = new_label.0.block_dev();
Command::new("e2label")
.arg(block_dev)
.arg(new_label.0.label())
.invoke(crate::ErrorKind::BlockDevice)
.await?;
let mut mounted = mount_label(new_label.0).await?;
let mounted = TmpMountGuard::mount(&new_label.0.as_fs()).await?;
let sedcmd = format!("s/LABEL=\\(blue\\|green\\)/LABEL={}/g", new_label.0.label());
Command::new("sed")
.arg("-i")
.arg(&sedcmd)
.arg(mounted.value.mount_folder().join("etc/fstab"))
.arg(mounted.as_ref().join("etc/fstab"))
.output()
.await?;
mounted.unmount_label().await?;
mounted.unmount().await?;
Command::new("sed")
.arg("-i")
.arg(&sedcmd)
.arg(mounted_boot.value.mount_folder().join("cmdline.txt"))
.arg(Path::new(BOOT_RW_PATH).join("cmdline.txt"))
.output()
.await?;
@@ -448,24 +382,6 @@ async fn swap_boot_label(
Ok(())
}
#[instrument]
async fn mount_label<F>(file_type: F) -> Result<MountedResource<F>, Error>
where
F: FileType,
{
let label = file_type.label();
let folder = file_type.mount_folder();
tokio::fs::create_dir_all(&folder)
.await
.with_ctx(|_| (crate::ErrorKind::Filesystem, folder.display().to_string()))?;
Command::new("mount")
.arg("-L")
.arg(label)
.arg(folder)
.invoke(crate::ErrorKind::Filesystem)
.await?;
Ok(MountedResource::new(file_type))
}
/// Captured from doing an fstab with an embassy box and the cat from the /etc/fstab
#[test]
fn test_capture() {

View File

@@ -4,10 +4,11 @@ use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::process::{exit, Stdio};
use std::process::Stdio;
use std::str::FromStr;
use std::sync::Arc;
use ::serde::{Deserialize, Deserializer, Serialize, Serializer};
use async_trait::async_trait;
use clap::ArgMatches;
use color_eyre::eyre::{self, eyre};
@@ -17,8 +18,6 @@ use futures::future::BoxFuture;
use futures::FutureExt;
use lazy_static::lazy_static;
use patch_db::{HasModel, Model};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use tokio::fs::File;
use tokio::sync::{Mutex, OwnedMutexGuard, RwLock};
use tokio::task::{JoinError, JoinHandle};
@@ -29,6 +28,7 @@ use crate::{Error, ResultExt as _};
pub mod io;
pub mod logger;
pub mod serde;
#[derive(Clone, Copy, Debug)]
pub enum Never {}
@@ -84,90 +84,6 @@ pub trait ApplyRef {
impl<T> Apply for T {}
impl<T> ApplyRef for T {}
pub fn deserialize_from_str<
'de,
D: serde::de::Deserializer<'de>,
T: FromStr<Err = E>,
E: std::fmt::Display,
>(
deserializer: D,
) -> std::result::Result<T, D::Error> {
struct Visitor<T: FromStr<Err = E>, E>(std::marker::PhantomData<T>);
impl<'de, T: FromStr<Err = Err>, Err: std::fmt::Display> serde::de::Visitor<'de>
for Visitor<T, Err>
{
type Value = T;
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(formatter, "a parsable string")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
v.parse().map_err(|e| serde::de::Error::custom(e))
}
}
deserializer.deserialize_str(Visitor(std::marker::PhantomData))
}
pub fn deserialize_from_str_opt<
'de,
D: serde::de::Deserializer<'de>,
T: FromStr<Err = E>,
E: std::fmt::Display,
>(
deserializer: D,
) -> std::result::Result<Option<T>, D::Error> {
struct Visitor<T: FromStr<Err = E>, E>(std::marker::PhantomData<T>);
impl<'de, T: FromStr<Err = Err>, Err: std::fmt::Display> serde::de::Visitor<'de>
for Visitor<T, Err>
{
type Value = Option<T>;
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(formatter, "a parsable string")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
v.parse().map(Some).map_err(|e| serde::de::Error::custom(e))
}
fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where
D: serde::de::Deserializer<'de>,
{
deserializer.deserialize_str(Visitor(std::marker::PhantomData))
}
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(None)
}
fn visit_unit<E>(self) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(None)
}
}
deserializer.deserialize_any(Visitor(std::marker::PhantomData))
}
pub fn serialize_display<T: std::fmt::Display, S: Serializer>(
t: &T,
serializer: S,
) -> Result<S::Ok, S::Error> {
String::serialize(&t.to_string(), serializer)
}
pub fn serialize_display_opt<T: std::fmt::Display, S: Serializer>(
t: &Option<T>,
serializer: S,
) -> Result<S::Ok, S::Error> {
Option::<String>::serialize(&t.as_ref().map(|t| t.to_string()), serializer)
}
pub async fn daemon<F: FnMut() -> Fut, Fut: Future<Output = ()> + Send + 'static>(
mut f: F,
cooldown: std::time::Duration,
@@ -206,134 +122,6 @@ impl<T> SNone<T> {
}
impl<T> SOption<T> for SNone<T> {}
#[derive(Debug, Serialize)]
#[serde(untagged)]
pub enum ValuePrimative {
Null,
Boolean(bool),
String(String),
Number(serde_json::Number),
}
impl<'de> serde::de::Deserialize<'de> for ValuePrimative {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = ValuePrimative;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "a JSON primative value")
}
fn visit_unit<E>(self) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Null)
}
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Null)
}
fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Boolean(v))
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::String(v.to_owned()))
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::String(v))
}
fn visit_f32<E>(self, v: f32) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(
serde_json::Number::from_f64(v as f64).ok_or_else(|| {
serde::de::Error::invalid_value(
serde::de::Unexpected::Float(v as f64),
&"a finite number",
)
})?,
))
}
fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(
serde_json::Number::from_f64(v).ok_or_else(|| {
serde::de::Error::invalid_value(
serde::de::Unexpected::Float(v),
&"a finite number",
)
})?,
))
}
fn visit_u8<E>(self, v: u8) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(v.into()))
}
fn visit_u16<E>(self, v: u16) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(v.into()))
}
fn visit_u32<E>(self, v: u32) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(v.into()))
}
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(v.into()))
}
fn visit_i8<E>(self, v: i8) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(v.into()))
}
fn visit_i16<E>(self, v: i16) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(v.into()))
}
fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(v.into()))
}
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ValuePrimative::Number(v.into()))
}
}
deserializer.deserialize_any(Visitor)
}
}
#[derive(Debug, Clone)]
pub struct Version {
version: emver::Version,
@@ -419,7 +207,7 @@ impl<'de> Deserialize<'de> for Version {
D: Deserializer<'de>,
{
let string = String::deserialize(deserializer)?;
let version = emver::Version::from_str(&string).map_err(serde::de::Error::custom)?;
let version = emver::Version::from_str(&string).map_err(::serde::de::Error::custom)?;
Ok(Self { string, version })
}
}
@@ -478,273 +266,10 @@ impl<W: std::fmt::Write> std::io::Write for FmtWriter<W> {
}
}
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub enum IoFormat {
Json,
JsonPretty,
Yaml,
Cbor,
Toml,
TomlPretty,
}
impl Default for IoFormat {
fn default() -> Self {
IoFormat::JsonPretty
}
}
impl std::fmt::Display for IoFormat {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
use IoFormat::*;
match self {
Json => write!(f, "JSON"),
JsonPretty => write!(f, "JSON (pretty)"),
Yaml => write!(f, "YAML"),
Cbor => write!(f, "CBOR"),
Toml => write!(f, "TOML"),
TomlPretty => write!(f, "TOML (pretty)"),
}
}
}
impl std::str::FromStr for IoFormat {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
serde_json::from_value(Value::String(s.to_owned()))
.with_kind(crate::ErrorKind::Deserialization)
}
}
impl IoFormat {
pub fn to_writer<W: std::io::Write, T: Serialize>(
&self,
mut writer: W,
value: &T,
) -> Result<(), Error> {
match self {
IoFormat::Json => {
serde_json::to_writer(writer, value).with_kind(crate::ErrorKind::Serialization)
}
IoFormat::JsonPretty => serde_json::to_writer_pretty(writer, value)
.with_kind(crate::ErrorKind::Serialization),
IoFormat::Yaml => {
serde_yaml::to_writer(writer, value).with_kind(crate::ErrorKind::Serialization)
}
IoFormat::Cbor => serde_cbor::ser::into_writer(value, writer)
.with_kind(crate::ErrorKind::Serialization),
IoFormat::Toml => writer
.write_all(
&serde_toml::to_vec(
&serde_toml::Value::try_from(value)
.with_kind(crate::ErrorKind::Serialization)?,
)
.with_kind(crate::ErrorKind::Serialization)?,
)
.with_kind(crate::ErrorKind::Serialization),
IoFormat::TomlPretty => writer
.write_all(
serde_toml::to_string_pretty(
&serde_toml::Value::try_from(value)
.with_kind(crate::ErrorKind::Serialization)?,
)
.with_kind(crate::ErrorKind::Serialization)?
.as_bytes(),
)
.with_kind(crate::ErrorKind::Serialization),
}
}
pub fn to_vec<T: Serialize>(&self, value: &T) -> Result<Vec<u8>, Error> {
match self {
IoFormat::Json => serde_json::to_vec(value).with_kind(crate::ErrorKind::Serialization),
IoFormat::JsonPretty => {
serde_json::to_vec_pretty(value).with_kind(crate::ErrorKind::Serialization)
}
IoFormat::Yaml => serde_yaml::to_vec(value).with_kind(crate::ErrorKind::Serialization),
IoFormat::Cbor => {
let mut res = Vec::new();
serde_cbor::ser::into_writer(value, &mut res)
.with_kind(crate::ErrorKind::Serialization)?;
Ok(res)
}
IoFormat::Toml => serde_toml::to_vec(
&serde_toml::Value::try_from(value).with_kind(crate::ErrorKind::Serialization)?,
)
.with_kind(crate::ErrorKind::Serialization),
IoFormat::TomlPretty => serde_toml::to_string_pretty(
&serde_toml::Value::try_from(value).with_kind(crate::ErrorKind::Serialization)?,
)
.map(|s| s.into_bytes())
.with_kind(crate::ErrorKind::Serialization),
}
}
/// BLOCKING
pub fn from_reader<R: std::io::Read, T: for<'de> Deserialize<'de>>(
&self,
mut reader: R,
) -> Result<T, Error> {
match self {
IoFormat::Json | IoFormat::JsonPretty => {
serde_json::from_reader(reader).with_kind(crate::ErrorKind::Deserialization)
}
IoFormat::Yaml => {
serde_yaml::from_reader(reader).with_kind(crate::ErrorKind::Deserialization)
}
IoFormat::Cbor => {
serde_cbor::de::from_reader(reader).with_kind(crate::ErrorKind::Deserialization)
}
IoFormat::Toml | IoFormat::TomlPretty => {
let mut s = String::new();
reader
.read_to_string(&mut s)
.with_kind(crate::ErrorKind::Deserialization)?;
serde_toml::from_str(&s).with_kind(crate::ErrorKind::Deserialization)
}
}
}
pub fn from_slice<T: for<'de> Deserialize<'de>>(&self, slice: &[u8]) -> Result<T, Error> {
match self {
IoFormat::Json | IoFormat::JsonPretty => {
serde_json::from_slice(slice).with_kind(crate::ErrorKind::Deserialization)
}
IoFormat::Yaml => {
serde_yaml::from_slice(slice).with_kind(crate::ErrorKind::Deserialization)
}
IoFormat::Cbor => {
serde_cbor::de::from_reader(slice).with_kind(crate::ErrorKind::Deserialization)
}
IoFormat::Toml | IoFormat::TomlPretty => {
serde_toml::from_slice(slice).with_kind(crate::ErrorKind::Deserialization)
}
}
}
}
pub fn display_serializable<T: Serialize>(t: T, matches: &ArgMatches<'_>) {
let format = match matches.value_of("format").map(|f| f.parse()) {
Some(Ok(f)) => f,
Some(Err(_)) => {
eprintln!("unrecognized formatter");
exit(1)
}
None => IoFormat::default(),
};
format
.to_writer(std::io::stdout(), &t)
.expect("Error serializing result to stdout")
}
pub fn display_none<T>(_: T, _: &ArgMatches) {
()
}
pub fn parse_stdin_deserializable<T: for<'de> Deserialize<'de>>(
stdin: &mut std::io::Stdin,
matches: &ArgMatches<'_>,
) -> Result<T, Error> {
let format = match matches.value_of("format").map(|f| f.parse()) {
Some(Ok(f)) => f,
Some(Err(_)) => {
eprintln!("unrecognized formatter");
exit(1)
}
None => IoFormat::default(),
};
format.from_reader(stdin)
}
#[derive(Debug, Clone, Copy)]
pub struct Duration(std::time::Duration);
impl Deref for Duration {
type Target = std::time::Duration;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<std::time::Duration> for Duration {
fn from(t: std::time::Duration) -> Self {
Duration(t)
}
}
impl std::str::FromStr for Duration {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let units_idx = s.find(|c: char| c.is_alphabetic()).ok_or_else(|| {
Error::new(
eyre!("Must specify units for duration"),
crate::ErrorKind::Deserialization,
)
})?;
let (num, units) = s.split_at(units_idx);
use std::time::Duration;
Ok(Duration(match units {
"d" if num.contains(".") => Duration::from_secs_f64(num.parse::<f64>()? * 86_400_f64),
"d" => Duration::from_secs(num.parse::<u64>()? * 86_400),
"h" if num.contains(".") => Duration::from_secs_f64(num.parse::<f64>()? * 3_600_f64),
"h" => Duration::from_secs(num.parse::<u64>()? * 3_600),
"m" if num.contains(".") => Duration::from_secs_f64(num.parse::<f64>()? * 60_f64),
"m" => Duration::from_secs(num.parse::<u64>()? * 60),
"s" if num.contains(".") => Duration::from_secs_f64(num.parse()?),
"s" => Duration::from_secs(num.parse()?),
"ms" if num.contains(".") => Duration::from_secs_f64(num.parse::<f64>()? / 1_000_f64),
"ms" => {
let millis: u128 = num.parse()?;
Duration::new((millis / 1_000) as u64, (millis % 1_000) as u32)
}
"us" | "µs" if num.contains(".") => {
Duration::from_secs_f64(num.parse::<f64>()? / 1_000_000_f64)
}
"us" | "µs" => {
let micros: u128 = num.parse()?;
Duration::new((micros / 1_000_000) as u64, (micros % 1_000_000) as u32)
}
"ns" if num.contains(".") => {
Duration::from_secs_f64(num.parse::<f64>()? / 1_000_000_000_f64)
}
"ns" => {
let nanos: u128 = num.parse()?;
Duration::new(
(nanos / 1_000_000_000) as u64,
(nanos % 1_000_000_000) as u32,
)
}
_ => {
return Err(Error::new(
eyre!("Invalid units for duration"),
crate::ErrorKind::Deserialization,
))
}
}))
}
}
impl std::fmt::Display for Duration {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let nanos = self.as_nanos();
match () {
_ if nanos % 86_400_000_000_000 == 0 => write!(f, "{}d", nanos / 86_400_000_000_000),
_ if nanos % 3_600_000_000_000 == 0 => write!(f, "{}h", nanos / 3_600_000_000_000),
_ if nanos % 60_000_000_000 == 0 => write!(f, "{}m", nanos / 60_000_000_000),
_ if nanos % 1_000_000_000 == 0 => write!(f, "{}s", nanos / 1_000_000_000),
_ if nanos % 1_000_000 == 0 => write!(f, "{}ms", nanos / 1_000_000),
_ if nanos % 1_000 == 0 => write!(f, "{}µs", nanos / 1_000),
_ => write!(f, "{}ns", nanos),
}
}
}
impl<'de> Deserialize<'de> for Duration {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserialize_from_str(deserializer)
}
}
impl Serialize for Duration {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serialize_display(self, serializer)
}
}
pub struct Container<T>(RwLock<Option<T>>);
impl<T> Container<T> {
pub fn new(value: Option<T>) -> Self {
@@ -793,85 +318,6 @@ impl<H: Digest, W: std::io::Write> std::io::Write for HashWriter<H, W> {
}
}
pub fn deserialize_number_permissive<
'de,
D: serde::de::Deserializer<'de>,
T: FromStr<Err = E> + num::cast::FromPrimitive,
E: std::fmt::Display,
>(
deserializer: D,
) -> std::result::Result<T, D::Error> {
struct Visitor<T: FromStr<Err = E> + num::cast::FromPrimitive, E>(std::marker::PhantomData<T>);
impl<'de, T: FromStr<Err = Err> + num::cast::FromPrimitive, Err: std::fmt::Display>
serde::de::Visitor<'de> for Visitor<T, Err>
{
type Value = T;
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(formatter, "a parsable string")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
v.parse().map_err(|e| serde::de::Error::custom(e))
}
fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
T::from_f64(v).ok_or_else(|| {
serde::de::Error::custom(format!(
"{} cannot be represented by the requested type",
v
))
})
}
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
T::from_u64(v).ok_or_else(|| {
serde::de::Error::custom(format!(
"{} cannot be represented by the requested type",
v
))
})
}
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
T::from_i64(v).ok_or_else(|| {
serde::de::Error::custom(format!(
"{} cannot be represented by the requested type",
v
))
})
}
}
deserializer.deserialize_str(Visitor(std::marker::PhantomData))
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Port(pub u16);
impl<'de> Deserialize<'de> for Port {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
//TODO: if number, be permissive
deserialize_number_permissive(deserializer).map(Port)
}
}
impl Serialize for Port {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serialize_display(&self.0, serializer)
}
}
pub trait IntoDoubleEndedIterator<T>: IntoIterator<Item = T> {
type IntoIter: Iterator<Item = T> + DoubleEndedIterator;
fn into_iter(self) -> <Self as IntoDoubleEndedIterator<T>>::IntoIter;
@@ -887,92 +333,6 @@ where
}
}
#[derive(Debug, Clone)]
pub struct Reversible<T, Container = Vec<T>>
where
for<'a> &'a Container: IntoDoubleEndedIterator<&'a T>,
{
reversed: bool,
data: Container,
phantom: PhantomData<T>,
}
impl<T, Container> Reversible<T, Container>
where
for<'a> &'a Container: IntoDoubleEndedIterator<&'a T>,
{
pub fn new(data: Container) -> Self {
Reversible {
reversed: false,
data,
phantom: PhantomData,
}
}
pub fn reverse(&mut self) {
self.reversed = !self.reversed
}
pub fn iter(
&self,
) -> itertools::Either<
<&Container as IntoDoubleEndedIterator<&T>>::IntoIter,
std::iter::Rev<<&Container as IntoDoubleEndedIterator<&T>>::IntoIter>,
> {
let iter = IntoDoubleEndedIterator::into_iter(&self.data);
if self.reversed {
itertools::Either::Right(iter.rev())
} else {
itertools::Either::Left(iter)
}
}
}
impl<T, Container> Serialize for Reversible<T, Container>
where
for<'a> &'a Container: IntoDoubleEndedIterator<&'a T>,
T: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
use serde::ser::SerializeSeq;
let iter = IntoDoubleEndedIterator::into_iter(&self.data);
let mut seq_ser = serializer.serialize_seq(match iter.size_hint() {
(lower, Some(upper)) if lower == upper => Some(upper),
_ => None,
})?;
if self.reversed {
for elem in iter.rev() {
seq_ser.serialize_element(elem)?;
}
} else {
for elem in iter {
seq_ser.serialize_element(elem)?;
}
}
seq_ser.end()
}
}
impl<'de, T, Container> Deserialize<'de> for Reversible<T, Container>
where
for<'a> &'a Container: IntoDoubleEndedIterator<&'a T>,
Container: Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Ok(Reversible::new(Deserialize::deserialize(deserializer)?))
}
fn deserialize_in_place<D>(deserializer: D, place: &mut Self) -> Result<(), D::Error>
where
D: Deserializer<'de>,
{
Deserialize::deserialize_in_place(deserializer, &mut place.data)
}
}
#[pin_project::pin_project(PinnedDrop)]
pub struct NonDetachingJoinHandle<T>(#[pin] JoinHandle<T>);
impl<T> From<JoinHandle<T>> for NonDetachingJoinHandle<T> {

690
appmgr/src/util/serde.rs Normal file
View File

@@ -0,0 +1,690 @@
use std::marker::PhantomData;
use std::ops::Deref;
use std::process::exit;
use std::str::FromStr;
use clap::ArgMatches;
use color_eyre::eyre::eyre;
use serde::ser::{SerializeMap, SerializeSeq};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use super::IntoDoubleEndedIterator;
use crate::{Error, ResultExt};
pub fn deserialize_from_str<
'de,
D: serde::de::Deserializer<'de>,
T: FromStr<Err = E>,
E: std::fmt::Display,
>(
deserializer: D,
) -> std::result::Result<T, D::Error> {
struct Visitor<T: FromStr<Err = E>, E>(std::marker::PhantomData<T>);
impl<'de, T: FromStr<Err = Err>, Err: std::fmt::Display> serde::de::Visitor<'de>
for Visitor<T, Err>
{
type Value = T;
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(formatter, "a parsable string")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
v.parse().map_err(|e| serde::de::Error::custom(e))
}
}
deserializer.deserialize_str(Visitor(std::marker::PhantomData))
}
/// Deserializes an `Option<T>` via `T`'s `FromStr` implementation.
///
/// Intended for `#[serde(deserialize_with = "deserialize_from_str_opt")]`:
/// null/unit/none inputs yield `Ok(None)`; a present string is parsed and
/// wrapped in `Some`.
pub fn deserialize_from_str_opt<
    'de,
    D: serde::de::Deserializer<'de>,
    T: FromStr<Err = E>,
    E: std::fmt::Display,
>(
    deserializer: D,
) -> std::result::Result<Option<T>, D::Error> {
    struct Visitor<T: FromStr<Err = E>, E>(std::marker::PhantomData<T>);
    impl<'de, T: FromStr<Err = Err>, Err: std::fmt::Display> serde::de::Visitor<'de>
        for Visitor<T, Err>
    {
        type Value = Option<T>;
        fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(formatter, "a parsable string")
        }
        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            v.parse().map(Some).map_err(|e| serde::de::Error::custom(e))
        }
        // `Some(_)` inputs recurse with a fresh visitor restricted to strings.
        fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
        where
            D: serde::de::Deserializer<'de>,
        {
            deserializer.deserialize_str(Visitor(std::marker::PhantomData))
        }
        fn visit_none<E>(self) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            Ok(None)
        }
        fn visit_unit<E>(self) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            Ok(None)
        }
    }
    // NOTE(review): `deserialize_any` lets self-describing formats route to
    // the string vs. none/unit paths above — confirm no non-self-describing
    // format (e.g. bare CBOR structs) uses this helper.
    deserializer.deserialize_any(Visitor(std::marker::PhantomData))
}
/// Serializes any `Display` type as its string representation.
///
/// Counterpart of `deserialize_from_str`, for `#[serde(serialize_with = ...)]`.
pub fn serialize_display<T: std::fmt::Display, S: Serializer>(
    t: &T,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    let rendered = t.to_string();
    rendered.serialize(serializer)
}
/// Serializes an `Option` of any `Display` type as an optional string.
///
/// Counterpart of `deserialize_from_str_opt`.
pub fn serialize_display_opt<T: std::fmt::Display, S: Serializer>(
    t: &Option<T>,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    let rendered = match t {
        Some(value) => Some(value.to_string()),
        None => None,
    };
    rendered.serialize(serializer)
}
/// A single JSON scalar value: null, boolean, string, or number.
///
/// NOTE(review): "Primative" is a misspelling of "Primitive", but the name
/// (and the expecting-message below) are existing public surface, so both are
/// left unchanged here.
#[derive(Debug, Serialize)]
#[serde(untagged)]
pub enum ValuePrimative {
    Null,
    Boolean(bool),
    String(String),
    Number(serde_json::Number),
}
impl<'de> serde::de::Deserialize<'de> for ValuePrimative {
    /// Accepts any scalar input and maps it onto the matching variant;
    /// rejects non-finite floats (`serde_json::Number` cannot hold NaN/inf).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        struct Visitor;
        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = ValuePrimative;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                write!(formatter, "a JSON primative value")
            }
            // Unit and "none" both collapse to `Null`.
            fn visit_unit<E>(self) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Null)
            }
            fn visit_none<E>(self) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Null)
            }
            fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Boolean(v))
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::String(v.to_owned()))
            }
            fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::String(v))
            }
            // Floats go through `serde_json::Number::from_f64`, which returns
            // `None` for NaN/infinity — surfaced here as an invalid-value error.
            fn visit_f32<E>(self, v: f32) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(
                    serde_json::Number::from_f64(v as f64).ok_or_else(|| {
                        serde::de::Error::invalid_value(
                            serde::de::Unexpected::Float(v as f64),
                            &"a finite number",
                        )
                    })?,
                ))
            }
            fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(
                    serde_json::Number::from_f64(v).ok_or_else(|| {
                        serde::de::Error::invalid_value(
                            serde::de::Unexpected::Float(v),
                            &"a finite number",
                        )
                    })?,
                ))
            }
            // All integer widths convert into `serde_json::Number` losslessly.
            fn visit_u8<E>(self, v: u8) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(v.into()))
            }
            fn visit_u16<E>(self, v: u16) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(v.into()))
            }
            fn visit_u32<E>(self, v: u32) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(v.into()))
            }
            fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(v.into()))
            }
            fn visit_i8<E>(self, v: i8) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(v.into()))
            }
            fn visit_i16<E>(self, v: i16) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(v.into()))
            }
            fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(v.into()))
            }
            fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ValuePrimative::Number(v.into()))
            }
        }
        deserializer.deserialize_any(Visitor)
    }
}
/// Supported serialization formats for CLI/API input and output.
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub enum IoFormat {
    Json,
    JsonPretty,
    Yaml,
    Cbor,
    Toml,
    TomlPretty,
}
impl Default for IoFormat {
    /// Pretty-printed JSON is the default for human-facing output.
    fn default() -> Self {
        Self::JsonPretty
    }
}
impl std::fmt::Display for IoFormat {
    /// Writes the human-readable name of the format.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::Json => "JSON",
            Self::JsonPretty => "JSON (pretty)",
            Self::Yaml => "YAML",
            Self::Cbor => "CBOR",
            Self::Toml => "TOML",
            Self::TomlPretty => "TOML (pretty)",
        };
        write!(f, "{}", name)
    }
}
impl std::str::FromStr for IoFormat {
    type Err = Error;
    /// Parses the kebab-case variant name (e.g. "json-pretty") by routing
    /// through the serde `Deserialize` impl, keeping CLI parsing in sync
    /// with the API representation.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let as_value = Value::String(s.to_owned());
        serde_json::from_value(as_value).with_kind(crate::ErrorKind::Deserialization)
    }
}
impl IoFormat {
    /// Serializes `value` into `writer` in this format.
    ///
    /// For the TOML variants the value must be representable as a TOML table
    /// (`serde_toml::Value::try_from` fails otherwise).
    pub fn to_writer<W: std::io::Write, T: Serialize>(
        &self,
        mut writer: W,
        value: &T,
    ) -> Result<(), Error> {
        match self {
            IoFormat::Json => {
                serde_json::to_writer(writer, value).with_kind(crate::ErrorKind::Serialization)
            }
            IoFormat::JsonPretty => serde_json::to_writer_pretty(writer, value)
                .with_kind(crate::ErrorKind::Serialization),
            IoFormat::Yaml => {
                serde_yaml::to_writer(writer, value).with_kind(crate::ErrorKind::Serialization)
            }
            IoFormat::Cbor => serde_cbor::ser::into_writer(value, writer)
                .with_kind(crate::ErrorKind::Serialization),
            // TOML has no streaming serializer: encode to a buffer, then write.
            IoFormat::Toml => writer
                .write_all(
                    &serde_toml::to_vec(
                        &serde_toml::Value::try_from(value)
                            .with_kind(crate::ErrorKind::Serialization)?,
                    )
                    .with_kind(crate::ErrorKind::Serialization)?,
                )
                .with_kind(crate::ErrorKind::Serialization),
            IoFormat::TomlPretty => writer
                .write_all(
                    serde_toml::to_string_pretty(
                        &serde_toml::Value::try_from(value)
                            .with_kind(crate::ErrorKind::Serialization)?,
                    )
                    .with_kind(crate::ErrorKind::Serialization)?
                    .as_bytes(),
                )
                .with_kind(crate::ErrorKind::Serialization),
        }
    }
    /// Serializes `value` to an in-memory buffer in this format.
    pub fn to_vec<T: Serialize>(&self, value: &T) -> Result<Vec<u8>, Error> {
        match self {
            IoFormat::Json => serde_json::to_vec(value).with_kind(crate::ErrorKind::Serialization),
            IoFormat::JsonPretty => {
                serde_json::to_vec_pretty(value).with_kind(crate::ErrorKind::Serialization)
            }
            IoFormat::Yaml => serde_yaml::to_vec(value).with_kind(crate::ErrorKind::Serialization),
            IoFormat::Cbor => {
                let mut res = Vec::new();
                serde_cbor::ser::into_writer(value, &mut res)
                    .with_kind(crate::ErrorKind::Serialization)?;
                Ok(res)
            }
            IoFormat::Toml => serde_toml::to_vec(
                &serde_toml::Value::try_from(value).with_kind(crate::ErrorKind::Serialization)?,
            )
            .with_kind(crate::ErrorKind::Serialization),
            IoFormat::TomlPretty => serde_toml::to_string_pretty(
                &serde_toml::Value::try_from(value).with_kind(crate::ErrorKind::Serialization)?,
            )
            .map(|s| s.into_bytes())
            .with_kind(crate::ErrorKind::Serialization),
        }
    }
    /// BLOCKING: deserializes a value of this format, reading synchronously
    /// from `reader`.
    pub fn from_reader<R: std::io::Read, T: for<'de> Deserialize<'de>>(
        &self,
        mut reader: R,
    ) -> Result<T, Error> {
        match self {
            IoFormat::Json | IoFormat::JsonPretty => {
                serde_json::from_reader(reader).with_kind(crate::ErrorKind::Deserialization)
            }
            IoFormat::Yaml => {
                serde_yaml::from_reader(reader).with_kind(crate::ErrorKind::Deserialization)
            }
            IoFormat::Cbor => {
                serde_cbor::de::from_reader(reader).with_kind(crate::ErrorKind::Deserialization)
            }
            // The TOML parser is string-based, so slurp the reader first.
            IoFormat::Toml | IoFormat::TomlPretty => {
                let mut s = String::new();
                reader
                    .read_to_string(&mut s)
                    .with_kind(crate::ErrorKind::Deserialization)?;
                serde_toml::from_str(&s).with_kind(crate::ErrorKind::Deserialization)
            }
        }
    }
    /// Deserializes a value of this format from an in-memory byte slice.
    pub fn from_slice<T: for<'de> Deserialize<'de>>(&self, slice: &[u8]) -> Result<T, Error> {
        match self {
            IoFormat::Json | IoFormat::JsonPretty => {
                serde_json::from_slice(slice).with_kind(crate::ErrorKind::Deserialization)
            }
            IoFormat::Yaml => {
                serde_yaml::from_slice(slice).with_kind(crate::ErrorKind::Deserialization)
            }
            // Fix: use the slice entry point; the previous code routed the
            // slice through `from_reader`, taking the slower reader path and
            // breaking the symmetry with the other branches.
            IoFormat::Cbor => {
                serde_cbor::de::from_slice(slice).with_kind(crate::ErrorKind::Deserialization)
            }
            IoFormat::Toml | IoFormat::TomlPretty => {
                serde_toml::from_slice(slice).with_kind(crate::ErrorKind::Deserialization)
            }
        }
    }
}
/// Prints `t` to stdout in the format requested by the CLI `--format` flag
/// (defaulting to pretty JSON).
///
/// NOTE: exits the whole process with status 1 on an unrecognized format
/// name, and panics if serialization to stdout fails — CLI-only helper.
pub fn display_serializable<T: Serialize>(t: T, matches: &ArgMatches<'_>) {
    let format = match matches.value_of("format").map(|f| f.parse()) {
        Some(Ok(f)) => f,
        Some(Err(_)) => {
            eprintln!("unrecognized formatter");
            exit(1)
        }
        None => IoFormat::default(),
    };
    format
        .to_writer(std::io::stdout(), &t)
        .expect("Error serializing result to stdout")
}
/// Reads and deserializes a value from stdin in the format requested by the
/// CLI `--format` flag (defaulting to pretty JSON).
///
/// NOTE: exits the whole process with status 1 on an unrecognized format
/// name — CLI-only helper; delegates the actual read to `IoFormat::from_reader`.
pub fn parse_stdin_deserializable<T: for<'de> Deserialize<'de>>(
    stdin: &mut std::io::Stdin,
    matches: &ArgMatches<'_>,
) -> Result<T, Error> {
    let format = match matches.value_of("format").map(|f| f.parse()) {
        Some(Ok(f)) => f,
        Some(Err(_)) => {
            eprintln!("unrecognized formatter");
            exit(1)
        }
        None => IoFormat::default(),
    };
    format.from_reader(stdin)
}
/// A `std::time::Duration` wrapper with a human-readable `<number><unit>`
/// string form (e.g. "30s", "1.5h") used for (de)serialization.
#[derive(Debug, Clone, Copy)]
pub struct Duration(std::time::Duration);
impl Deref for Duration {
    type Target = std::time::Duration;
    // Exposes the full `std::time::Duration` API on the wrapper.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<std::time::Duration> for Duration {
    fn from(t: std::time::Duration) -> Self {
        Duration(t)
    }
}
impl std::str::FromStr for Duration {
    type Err = Error;
    /// Parses a `<number><unit>` duration string, e.g. "30s", "1.5h", "250ms".
    /// Supported units: d, h, m, s, ms, us/µs, ns. Values containing "." are
    /// parsed as `f64` and routed through `from_secs_f64`; integral values
    /// take an exact integer path.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // The unit suffix starts at the first alphabetic character.
        let units_idx = s.find(|c: char| c.is_alphabetic()).ok_or_else(|| {
            Error::new(
                eyre!("Must specify units for duration"),
                crate::ErrorKind::Deserialization,
            )
        })?;
        let (num, units) = s.split_at(units_idx);
        // NOTE: this `use` only shadows the *type* namespace. The tuple-struct
        // constructor `Duration(..)` just below still refers to this wrapper
        // (value namespace), while `Duration::from_secs*` resolve to std.
        use std::time::Duration;
        Ok(Duration(match units {
            "d" if num.contains(".") => Duration::from_secs_f64(num.parse::<f64>()? * 86_400_f64),
            "d" => Duration::from_secs(num.parse::<u64>()? * 86_400),
            "h" if num.contains(".") => Duration::from_secs_f64(num.parse::<f64>()? * 3_600_f64),
            "h" => Duration::from_secs(num.parse::<u64>()? * 3_600),
            "m" if num.contains(".") => Duration::from_secs_f64(num.parse::<f64>()? * 60_f64),
            "m" => Duration::from_secs(num.parse::<u64>()? * 60),
            "s" if num.contains(".") => Duration::from_secs_f64(num.parse()?),
            "s" => Duration::from_secs(num.parse()?),
            "ms" if num.contains(".") => Duration::from_secs_f64(num.parse::<f64>()? / 1_000_f64),
            "ms" => {
                let millis: u128 = num.parse()?;
                Duration::new((millis / 1_000) as u64, (millis % 1_000) as u32)
            }
            "us" | "µs" if num.contains(".") => {
                Duration::from_secs_f64(num.parse::<f64>()? / 1_000_000_f64)
            }
            "us" | "µs" => {
                let micros: u128 = num.parse()?;
                Duration::new((micros / 1_000_000) as u64, (micros % 1_000_000) as u32)
            }
            "ns" if num.contains(".") => {
                Duration::from_secs_f64(num.parse::<f64>()? / 1_000_000_000_f64)
            }
            "ns" => {
                let nanos: u128 = num.parse()?;
                Duration::new(
                    (nanos / 1_000_000_000) as u64,
                    (nanos % 1_000_000_000) as u32,
                )
            }
            _ => {
                return Err(Error::new(
                    eyre!("Invalid units for duration"),
                    crate::ErrorKind::Deserialization,
                ))
            }
        }))
    }
}
impl std::fmt::Display for Duration {
    /// Formats with the largest unit that divides the duration exactly,
    /// checked from days down to nanoseconds, so `Display` output round-trips
    /// through `FromStr`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let nanos = self.as_nanos();
        match () {
            _ if nanos % 86_400_000_000_000 == 0 => write!(f, "{}d", nanos / 86_400_000_000_000),
            _ if nanos % 3_600_000_000_000 == 0 => write!(f, "{}h", nanos / 3_600_000_000_000),
            _ if nanos % 60_000_000_000 == 0 => write!(f, "{}m", nanos / 60_000_000_000),
            _ if nanos % 1_000_000_000 == 0 => write!(f, "{}s", nanos / 1_000_000_000),
            _ if nanos % 1_000_000 == 0 => write!(f, "{}ms", nanos / 1_000_000),
            _ if nanos % 1_000 == 0 => write!(f, "{}µs", nanos / 1_000),
            _ => write!(f, "{}ns", nanos),
        }
    }
}
impl<'de> Deserialize<'de> for Duration {
    // Parses the `<number><unit>` string form via the `FromStr` impl above.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
impl Serialize for Duration {
    // Emits the `<number><unit>` string form via the `Display` impl above.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serialize_display(self, serializer)
    }
}
/// Deserializes a number from either a numeric literal (`u64`/`i64`/`f64`,
/// converted via `num::cast::FromPrimitive`) or a numeric string parsed with
/// `FromStr`.
pub fn deserialize_number_permissive<
    'de,
    D: serde::de::Deserializer<'de>,
    T: FromStr<Err = E> + num::cast::FromPrimitive,
    E: std::fmt::Display,
>(
    deserializer: D,
) -> std::result::Result<T, D::Error> {
    struct Visitor<T: FromStr<Err = E> + num::cast::FromPrimitive, E>(std::marker::PhantomData<T>);
    impl<'de, T: FromStr<Err = Err> + num::cast::FromPrimitive, Err: std::fmt::Display>
        serde::de::Visitor<'de> for Visitor<T, Err>
    {
        type Value = T;
        fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(formatter, "a parsable string")
        }
        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            v.parse().map_err(|e| serde::de::Error::custom(e))
        }
        // `from_f64`/`from_u64`/`from_i64` return `None` when the value does
        // not fit the target type; that is surfaced as a custom error.
        fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            T::from_f64(v).ok_or_else(|| {
                serde::de::Error::custom(format!(
                    "{} cannot be represented by the requested type",
                    v
                ))
            })
        }
        fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            T::from_u64(v).ok_or_else(|| {
                serde::de::Error::custom(format!(
                    "{} cannot be represented by the requested type",
                    v
                ))
            })
        }
        fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            T::from_i64(v).ok_or_else(|| {
                serde::de::Error::custom(format!(
                    "{} cannot be represented by the requested type",
                    v
                ))
            })
        }
    }
    // NOTE(review): despite the `deserialize_str` hint, self-describing
    // formats still dispatch numeric input to the numeric visit methods —
    // confirm for any non-self-describing format in use.
    deserializer.deserialize_str(Visitor(std::marker::PhantomData))
}
/// A TCP/UDP port number, serialized as a string and deserialized from either
/// a number or a numeric string.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Port(pub u16);
impl<'de> Deserialize<'de> for Port {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Permissive: accepts numeric literals as well as numeric strings.
        deserialize_number_permissive(deserializer).map(Port)
    }
}
impl Serialize for Port {
    // Serializes the port as a string (via `Display` on the inner `u16`).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serialize_display(&self.0, serializer)
    }
}
/// A collection wrapper whose iteration (and serialization) order can be
/// toggled between forward and reverse without moving the underlying data.
#[derive(Debug, Clone)]
pub struct Reversible<T, Container = Vec<T>>
where
    for<'a> &'a Container: IntoDoubleEndedIterator<&'a T>,
{
    // When true, `iter` walks the container back-to-front.
    reversed: bool,
    data: Container,
    phantom: PhantomData<T>,
}
impl<T, Container> Reversible<T, Container>
where
    for<'a> &'a Container: IntoDoubleEndedIterator<&'a T>,
{
    /// Wraps `data` with forward (non-reversed) iteration order.
    pub fn new(data: Container) -> Self {
        Reversible {
            reversed: false,
            data,
            phantom: PhantomData,
        }
    }
    /// Toggles the iteration direction in place.
    pub fn reverse(&mut self) {
        self.reversed = !self.reversed
    }
    /// Iterates in the current direction. `itertools::Either` is used so the
    /// two concrete iterator types share one return type without boxing.
    pub fn iter(
        &self,
    ) -> itertools::Either<
        <&Container as IntoDoubleEndedIterator<&T>>::IntoIter,
        std::iter::Rev<<&Container as IntoDoubleEndedIterator<&T>>::IntoIter>,
    > {
        let iter = IntoDoubleEndedIterator::into_iter(&self.data);
        if self.reversed {
            itertools::Either::Right(iter.rev())
        } else {
            itertools::Either::Left(iter)
        }
    }
}
impl<T, Container> Serialize for Reversible<T, Container>
where
    for<'a> &'a Container: IntoDoubleEndedIterator<&'a T>,
    T: Serialize,
{
    /// Serializes as a sequence, honoring the current iteration direction.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let iter = IntoDoubleEndedIterator::into_iter(&self.data);
        // Only report a sequence length when the size hint is exact.
        let mut seq_ser = serializer.serialize_seq(match iter.size_hint() {
            (lower, Some(upper)) if lower == upper => Some(upper),
            _ => None,
        })?;
        if self.reversed {
            for elem in iter.rev() {
                seq_ser.serialize_element(elem)?;
            }
        } else {
            for elem in iter {
                seq_ser.serialize_element(elem)?;
            }
        }
        seq_ser.end()
    }
}
impl<'de, T, Container> Deserialize<'de> for Reversible<T, Container>
where
    for<'a> &'a Container: IntoDoubleEndedIterator<&'a T>,
    Container: Deserialize<'de>,
{
    /// Deserializes the inner container; the result always starts in forward
    /// (non-reversed) order.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Reversible::new(Deserialize::deserialize(deserializer)?))
    }
    // The in-place variant keeps `place`'s existing `reversed` flag.
    fn deserialize_in_place<D>(deserializer: D, place: &mut Self) -> Result<(), D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize_in_place(deserializer, &mut place.data)
    }
}
/// A key/value pair (de)serialized as a single-entry map: `{ "<key>": <value> }`.
pub struct KeyVal<K, V> {
    pub key: K,
    pub value: V,
}
impl<K: Serialize, V: Serialize> Serialize for KeyVal<K, V> {
    /// Serializes as a map with exactly one entry.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut map = serializer.serialize_map(Some(1))?;
        map.serialize_entry(&self.key, &self.value)?;
        map.end()
    }
}
impl<'de, K: Deserialize<'de>, V: Deserialize<'de>> Deserialize<'de> for KeyVal<K, V> {
    /// Deserializes from a map, taking its first entry; an empty map is an
    /// invalid-length error.
    ///
    /// NOTE(review): only one entry is consumed here; maps with additional
    /// entries are typically rejected by the underlying format — confirm per
    /// format.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct Visitor<K, V>(PhantomData<(K, V)>);
        impl<'de, K: Deserialize<'de>, V: Deserialize<'de>> serde::de::Visitor<'de> for Visitor<K, V> {
            type Value = KeyVal<K, V>;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                write!(formatter, "A map with a single element")
            }
            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::MapAccess<'de>,
            {
                let (key, value) = map
                    .next_entry()?
                    .ok_or_else(|| serde::de::Error::invalid_length(0, &"1"))?;
                Ok(KeyVal { key, value })
            }
        }
        deserializer.deserialize_map(Visitor(PhantomData))
    }
}

View File

@@ -1,3 +1,4 @@
LABEL=green / ext4 discard,errors=remount-ro 0 1
LABEL=system-boot /media/boot-rw vfat defaults 0 1
/media/boot-rw /boot/firmware none defaults,bind,ro 0 0
LABEL=EMBASSY /embassy-os vfat defaults 0 1

View File

@@ -18,7 +18,9 @@ apt install -y \
sqlite3 \
wireless-tools \
net-tools \
ecryptfs-utils
ecryptfs-utils \
cifs-utils \
samba-common-bin
sed -i 's/"1"/"0"/g' /etc/apt/apt.conf.d/20auto-upgrades
sed -i 's/Restart=on-failure/Restart=always/g' /lib/systemd/system/tor@default.service
sed -i '/}/i \ \ \ \ application\/wasm \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ wasm;' /etc/nginx/mime.types

View File

@@ -39,6 +39,7 @@ sudo umount /tmp/eos-mnt
sudo mount ${OUTPUT_DEVICE}p3 /tmp/eos-mnt
sudo mkdir /tmp/eos-mnt/media/boot-rw
sudo mkdir /tmp/eos-mnt/embassy-os
sudo cp build/fstab /tmp/eos-mnt/etc/fstab
# Enter the appmgr directory, copy over the built EmbassyOS binaries and systemd services, edit the nginx config, then create the .ssh directory
cd appmgr/

View File

@@ -1,25 +1,31 @@
# Embassy Setup Wizard
## Instructions for running locally
## Development Environment Setup
**Make sure you have git, node, and npm installed**
**Make sure you have git, nvm (node, npm), and rust installed**
Install Ionic
```
node --version
v16.11.0
`npm i -g @ionic/cli`
npm --version
v8.0.0
```
Clone this repository
### Building Embassy UI
`git clone https://github.com/Start9Labs/embassy-os.git`
`cd embassy-os/setup-wizard`
`cd embassy-os`
Install dependencies
`cd setup-wizard/`
`npm i`
`npm --prefix . install @ionic/cli`
Copy `config-sample.json` to new file `config.json`
`npm --prefix . install`
Start the server
Copy `config-sample.json` and contents to new file `config.json`
`ionic serve`
**Start the development server**
`ionic serve`

View File

@@ -14,7 +14,7 @@ import { SuccessPageModule } from './pages/success/success.module'
import { InitPageModule } from './pages/init/init.module'
import { HomePageModule } from './pages/home/home.module'
import { LoadingPageModule } from './pages/loading/loading.module'
import { ProdKeyModalModule } from './pages/prod-key-modal/prod-key-modal.module'
import { ProdKeyModalModule } from './modals/prod-key-modal/prod-key-modal.module'
import { ProductKeyPageModule } from './pages/product-key/product-key.module'
import { RecoverPageModule } from './pages/recover/recover.module'

View File

@@ -0,0 +1,20 @@
import { NgModule } from '@angular/core'
import { CommonModule } from '@angular/common'
import { IonicModule } from '@ionic/angular'
import { FormsModule } from '@angular/forms'
import { CifsModal } from './cifs-modal.page'
// Angular module that declares and exports the CIFS shared-folder modal so
// other feature modules can present it.
@NgModule({
  declarations: [
    CifsModal,
  ],
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
  ],
  exports: [
    CifsModal,
  ],
})
export class CifsModalModule { }

View File

@@ -0,0 +1,81 @@
<ion-header>
<ion-toolbar>
<ion-title>
Connect Shared Folder
</ion-title>
</ion-toolbar>
</ion-header>
<ion-content class="ion-padding">
<form (ngSubmit)="submit()" #cifsForm="ngForm">
<p>Hostname *</p>
<ion-item>
<ion-input
id="hostname"
required
[(ngModel)]="cifs.hostname"
name="hostname"
#hostname="ngModel"
placeholder="e.g. My Computer, Bob's Laptop"
></ion-input>
</ion-item>
<p [hidden]="hostname.valid || hostname.pristine">
<ion-text color="danger">Hostname is required</ion-text>
</p>
<p>Path *</p>
<ion-item>
<ion-input
id="path"
required
[(ngModel)]="cifs.path"
name="path"
#path="ngModel"
placeholder="e.g. /Desktop/my-folder"
></ion-input>
</ion-item>
<p [hidden]="path.valid || path.pristine">
<ion-text color="danger">Path is required</ion-text>
</p>
<p>Username *</p>
<ion-item>
<ion-input
id="username"
required
[(ngModel)]="cifs.username"
name="username"
#username="ngModel"
placeholder="Enter username"
></ion-input>
</ion-item>
<p [hidden]="username.valid || username.pristine">
<ion-text color="danger">Username is required</ion-text>
</p>
<p>Password</p>
<ion-item>
<ion-input
id="password"
type="password"
[(ngModel)]="cifs.password"
name="password"
#password="ngModel"
></ion-input>
</ion-item>
<button hidden type="submit"></button>
</form>
</ion-content>
<ion-footer>
<ion-toolbar>
<ion-button class="ion-padding-end" slot="end" color="dark" fill="clear" (click)="cancel()">
Cancel
</ion-button>
<ion-button class="ion-padding-end" slot="end" color="dark" fill="clear" strong="true" [disabled]="!cifsForm.form.valid" (click)="submit()">
Verify
</ion-button>
</ion-toolbar>
</ion-footer>

View File

@@ -0,0 +1,3 @@
ion-content {
--ion-text-color: var(--ion-color-dark);
}

View File

@@ -0,0 +1,85 @@
import { Component } from '@angular/core'
import { AlertController, LoadingController, ModalController } from '@ionic/angular'
import { ApiService, BackupTarget, CifsBackupTarget, EmbassyOSRecoveryInfo } from 'src/app/services/api/api.service'
import { PasswordPage } from '../password/password.page'
@Component({
  selector: 'cifs-modal',
  templateUrl: 'cifs-modal.page.html',
  styleUrls: ['cifs-modal.page.scss'],
})
// Modal for connecting to a LAN shared folder (CIFS) that may contain an
// Embassy backup. On success it dismisses with the share details and the
// verified recovery password (role 'success').
export class CifsModal {
  // Form model bound to the template inputs; password may stay empty.
  cifs = {
    hostname: '',
    path: '',
    username: '',
    password: '',
  }
  constructor (
    private readonly modalController: ModalController,
    private readonly apiService: ApiService,
    private readonly loadingCtrl: LoadingController,
    private readonly alertCtrl: AlertController,
  ) { }
  // Closes the modal without producing a result.
  cancel () {
    this.modalController.dismiss()
  }
  // Verifies the share via the API while showing a spinner; on success opens
  // the password modal, on failure shows the connection-failed alert.
  async submit (): Promise<void> {
    const loader = await this.loadingCtrl.create({
      spinner: 'lines',
      message: 'Connecting to shared folder...',
      cssClass: 'loader',
    })
    await loader.present()
    try {
      const embassyOS = await this.apiService.verifyCifs(this.cifs)
      this.presentModalPassword(embassyOS)
    } catch (e) {
      this.presentAlertFailed()
    } finally {
      loader.dismiss()
    }
  }
  // Prompts for the backup password; if the inner modal dismisses with role
  // 'success', forwards the share details + password to this modal's caller.
  private async presentModalPassword (embassyOS: EmbassyOSRecoveryInfo): Promise<void> {
    const target: CifsBackupTarget = {
      type: 'cifs',
      ...this.cifs,
      mountable: true,
      'embassy-os': embassyOS,
    }
    const modal = await this.modalController.create({
      component: PasswordPage,
      componentProps: { target },
      cssClass: 'alertlike-modal',
    })
    modal.onDidDismiss().then(res => {
      if (res.role === 'success') {
        this.modalController.dismiss({
          cifs: this.cifs,
          recoveryPassword: res.data.password,
        }, 'success')
      }
    })
    await modal.present()
  }
  // Informational alert shown when verifyCifs rejects.
  private async presentAlertFailed (): Promise<void> {
    const alert = await this.alertCtrl.create({
      header: 'Connection Failed',
      message: 'Unable to connect to shared folder. Ensure (1) target computer is connected to LAN, (2) target folder is being shared, and (3) hostname, path, and credentials are accurate.',
      buttons: ['OK'],
    })
    alert.present()
  }
}
// NOTE(review): this interface is not exported and appears unused within this
// file — confirm whether it can be removed.
interface MappedCifs {
  hasValidBackup: boolean
  cifs: CifsBackupTarget
}

View File

@@ -4,16 +4,17 @@ import { IonicModule } from '@ionic/angular'
import { FormsModule } from '@angular/forms'
import { PasswordPage } from './password.page'
import { PasswordPageRoutingModule } from './password-routing.module'
@NgModule({
declarations: [
PasswordPage,
],
imports: [
CommonModule,
FormsModule,
IonicModule,
PasswordPageRoutingModule,
],
declarations: [PasswordPage],
exports: [
PasswordPage,
],
})
export class PasswordPageModule { }

View File

@@ -17,13 +17,8 @@
</div>
<form (ngSubmit)="!!storageDrive ? submitPw() : verifyPw()">
<p class="input-label">
Password:
</p>
<ion-item
color="dark"
[class]="pwError ? 'error-border' : password && !!storageDrive ? 'success-border' : ''"
>
<p>Password</p>
<ion-item [class]="pwError ? 'error-border' : password && !!storageDrive ? 'success-border' : ''">
<ion-input
#focusInput
[(ngModel)]="password"
@@ -41,10 +36,8 @@
<p style="color: var(--ion-color-danger); font-size: x-small;">{{ pwError }}</p>
</div>
<ng-container *ngIf="!!storageDrive">
<p class="input-label">
Confirm Password:
</p>
<ion-item color="dark" [class]="verError ? 'error-border' : passwordVer ? 'success-border' : ''">
<p>Confirm Password</p>
<ion-item [class]="verError ? 'error-border' : passwordVer ? 'success-border' : ''">
<ion-input
[(ngModel)]="passwordVer"
[ngModelOptions]="{'standalone': true}"

View File

@@ -0,0 +1,3 @@
ion-content {
--ion-text-color: var(--ion-color-dark);
}

View File

@@ -1,6 +1,6 @@
import { Component, Input, ViewChild } from '@angular/core'
import { IonInput, ModalController } from '@ionic/angular'
import { DiskInfo, PartitionInfo } from 'src/app/services/api/api.service'
import { BackupTarget, DiskInfo } from 'src/app/services/api/api.service'
import * as argon2 from '@start9labs/argon2'
@Component({
@@ -10,7 +10,7 @@ import * as argon2 from '@start9labs/argon2'
})
export class PasswordPage {
@ViewChild('focusInput') elem: IonInput
@Input() recoveryPartition: PartitionInfo
@Input() target: BackupTarget
@Input() storageDrive: DiskInfo
pwError = ''
@@ -30,11 +30,11 @@ export class PasswordPage {
}
async verifyPw () {
if (!this.recoveryPartition || !this.recoveryPartition['embassy-os']) this.pwError = 'No recovery drive' // unreachable
if (!this.target || !this.target['embassy-os']) this.pwError = 'No recovery target' // unreachable
try {
argon2.verify(this.recoveryPartition['embassy-os']['password-hash'], this.password)
this.modalController.dismiss({ password: this.password })
argon2.verify(this.target['embassy-os']['password-hash'], this.password)
this.modalController.dismiss({ password: this.password }, 'success')
} catch (e) {
this.pwError = 'Incorrect password provided'
}
@@ -47,11 +47,11 @@ export class PasswordPage {
}
if (this.pwError || this.verError) return
this.modalController.dismiss({ password: this.password })
this.modalController.dismiss({ password: this.password }, 'success')
}
validate () {
if (!!this.recoveryPartition) return this.pwError = ''
if (!!this.target) return this.pwError = ''
if (this.passwordVer) {
this.checkVer()

View File

@@ -4,15 +4,17 @@ import { IonicModule } from '@ionic/angular'
import { FormsModule } from '@angular/forms'
import { ProdKeyModal } from './prod-key-modal.page'
import { ProdKeyModalRoutingModule } from './prod-key-modal-routing.module'
@NgModule({
declarations: [
ProdKeyModal,
],
imports: [
CommonModule,
FormsModule,
IonicModule,
ProdKeyModalRoutingModule,
],
declarations: [ProdKeyModal],
exports: [
ProdKeyModal,
],
})
export class ProdKeyModalModule { }

View File

@@ -12,7 +12,7 @@
<div style="padding-bottom: 16px;">
<p>Verify the product key for the chosen recovery drive.</p>
</div>
<ion-item color="dark">
<ion-item>
<ion-input
#focusInput
[(ngModel)]="productKey"

View File

@@ -0,0 +1,3 @@
ion-content {
--ion-text-color: var(--ion-color-dark);
}

View File

@@ -1,6 +1,6 @@
import { Component, Input, ViewChild } from '@angular/core'
import { IonInput, LoadingController, ModalController } from '@ionic/angular'
import { ApiService, PartitionInfo } from 'src/app/services/api/api.service'
import { ApiService, DiskBackupTarget } from 'src/app/services/api/api.service'
import { HttpService } from 'src/app/services/api/http.service'
@Component({
@@ -10,7 +10,7 @@ import { HttpService } from 'src/app/services/api/http.service'
})
export class ProdKeyModal {
@ViewChild('focusInput') elem: IonInput
@Input() recoveryPartition: PartitionInfo
@Input() target: DiskBackupTarget
error = ''
productKey = ''
@@ -36,10 +36,10 @@ export class ProdKeyModal {
await loader.present()
try {
await this.apiService.set02XDrive(this.recoveryPartition.logicalname)
await this.apiService.set02XDrive(this.target.logicalname)
this.httpService.productKey = this.productKey
await this.apiService.verifyProductKey()
this.modalController.dismiss({ productKey: this.productKey })
this.modalController.dismiss({ productKey: this.productKey }, 'success')
} catch (e) {
this.httpService.productKey = undefined
this.error = 'Invalid Product Key'

View File

@@ -3,7 +3,7 @@ import { CommonModule } from '@angular/common'
import { IonicModule } from '@ionic/angular'
import { FormsModule } from '@angular/forms'
import { EmbassyPage } from './embassy.page'
import { PasswordPageModule } from '../password/password.module'
import { PasswordPageModule } from '../../modals/password/password.module'
import { EmbassyPageRoutingModule } from './embassy-routing.module'
import { PipesModule } from 'src/app/pipes/pipe.module'

View File

@@ -1,6 +1,6 @@
<ion-content>
<ion-grid style="padding-top: 32px; height: 100%; max-width: 540px;">
<ion-row style="height: 100%;">
<ion-grid>
<ion-row>
<ion-col class="ion-text-center">
<div style="padding-bottom: 32px;">
@@ -14,29 +14,19 @@
</ion-card-header>
<ion-card-content class="ion-margin">
<ng-container *ngIf="!loading && !storageDrives.length">
<h2>No drives found</h2>
<p>Please connect a storage drive to your Embassy and refresh the page.</p>
<ion-button style="margin-top: 25px;" (click)="refresh()" color="light">
Refresh
</ion-button>
</ng-container>
<!-- loading -->
<ion-spinner *ngIf="loading; else loaded" class="center-spinner" name="lines"></ion-spinner>
<ion-item-group>
<ng-container *ngIf="loading">
<ion-item button lines="none">
<ion-avatar slot="start">
<ion-skeleton-text animated></ion-skeleton-text>
</ion-avatar>
<ion-label class="ion-text-wrap">
<ion-skeleton-text style="width: 80%; margin: 13px 0;" animated></ion-skeleton-text>
<ion-skeleton-text style="width: 60%; margin: 10px 0;" animated></ion-skeleton-text>
</ion-label>
</ion-item>
<!-- not loading -->
<ng-template #loaded>
<ng-container *ngIf="!storageDrives.length">
<h2>No drives found</h2>
<p>Please connect a storage drive to your Embassy and click "Refresh".</p>
</ng-container>
<ng-container *ngIf="storageDrives.length">
<ion-item (click)="chooseDrive(drive)" class="ion-margin-bottom" button lines="none" *ngFor="let drive of storageDrives" [disabled]="drive.capacity < 34359738368">
<ion-icon slot="start" name="save-outline"></ion-icon>
<ion-item-group *ngIf="storageDrives.length">
<ion-item (click)="chooseDrive(drive)" class="ion-margin-bottom" button lines="none" *ngFor="let drive of storageDrives">
<ion-icon slot="start" name="save-outline" size="large" color="light"></ion-icon>
<ion-label class="ion-text-wrap">
<h1>{{ drive.vendor || 'Unknown Vendor' }} - {{ drive.model || 'Unknown Model' }}</h1>
<h2>{{ drive.logicalname }} - {{ drive.capacity | convertBytes }}</h2>
@@ -47,8 +37,8 @@
</p>
</ion-label>
</ion-item>
</ng-container>
</ion-item-group>
</ion-item-group>
</ng-template>
</ion-card-content>
</ion-card>
</ion-col>

View File

@@ -1,9 +1,9 @@
import { Component } from '@angular/core'
import { AlertController, LoadingController, ModalController, NavController } from '@ionic/angular'
import { ApiService, DiskInfo } from 'src/app/services/api/api.service'
import { ApiService, DiskInfo, DiskRecoverySource } from 'src/app/services/api/api.service'
import { ErrorToastService } from 'src/app/services/error-toast.service'
import { StateService } from 'src/app/services/state.service'
import { PasswordPage } from '../password/password.page'
import { PasswordPage } from '../../modals/password/password.page'
@Component({
selector: 'app-embassy',
@@ -11,8 +11,7 @@ import { PasswordPage } from '../password/password.page'
styleUrls: ['embassy.page.scss'],
})
export class EmbassyPage {
storageDrives = []
selectedDrive: DiskInfo = null
storageDrives: DiskInfo[] = []
loading = true
constructor (
@@ -30,15 +29,14 @@ export class EmbassyPage {
}
async refresh () {
this.storageDrives = []
this.selectedDrive = null
this.loading = true
await this.getDrives()
}
async getDrives () {
try {
this.storageDrives = (await this.apiService.getDrives()).filter(d => !d.partitions.map(p => p.logicalname).includes(this.stateService.recoveryPartition?.logicalname))
const drives = await this.apiService.getDrives()
this.storageDrives = drives.filter(d => !d.partitions.map(p => p.logicalname).includes((this.stateService.recoverySource as DiskRecoverySource)?.logicalname))
} catch (e) {
this.errorToastService.present(e.message)
} finally {
@@ -60,14 +58,22 @@ export class EmbassyPage {
{
text: 'Continue',
handler: () => {
this.presentModalPassword(drive)
if (this.stateService.recoveryPassword) {
this.setupEmbassy(drive, this.stateService.recoveryPassword)
} else {
this.presentModalPassword(drive)
}
},
},
],
})
await alert.present()
} else {
this.presentModalPassword(drive)
if (this.stateService.recoveryPassword) {
this.setupEmbassy(drive, this.stateService.recoveryPassword)
} else {
this.presentModalPassword(drive)
}
}
}
@@ -80,31 +86,30 @@ export class EmbassyPage {
})
modal.onDidDismiss().then(async ret => {
if (!ret.data || !ret.data.password) return
const loader = await this.loadingCtrl.create({
message: 'Transferring encrypted data',
})
await loader.present()
this.stateService.storageDrive = drive
this.stateService.embassyPassword = ret.data.password
try {
await this.stateService.setupEmbassy()
if (!!this.stateService.recoveryPartition) {
await this.navCtrl.navigateForward(`/loading`)
} else {
await this.navCtrl.navigateForward(`/init`)
}
} catch (e) {
this.errorToastService.present(`${e.message}: ${e.details}`)
console.error(e.message)
console.error(e.details)
} finally {
loader.dismiss()
}
this.setupEmbassy(drive, ret.data.password)
})
await modal.present()
}
private async setupEmbassy (drive: DiskInfo, password: string): Promise<void> {
const loader = await this.loadingCtrl.create({
message: 'Transferring encrypted data. This could take a while...',
})
await loader.present()
try {
await this.stateService.setupEmbassy(drive.logicalname, password)
if (!!this.stateService.recoverySource) {
await this.navCtrl.navigateForward(`/loading`)
} else {
await this.navCtrl.navigateForward(`/init`)
}
} catch (e) {
this.errorToastService.present(`${e.message}: ${e.details}. Restart Embassy to try again.`)
console.error(e)
} finally {
loader.dismiss()
}
}
}

View File

@@ -3,7 +3,7 @@ import { CommonModule } from '@angular/common'
import { IonicModule } from '@ionic/angular'
import { FormsModule } from '@angular/forms'
import { HomePage } from './home.page'
import { PasswordPageModule } from '../password/password.module'
import { PasswordPageModule } from '../../modals/password/password.module'
import { HomePageRoutingModule } from './home-routing.module'

View File

@@ -1,6 +1,6 @@
<ion-content>
<ion-grid style="padding-top: 32px; height: 100%; max-width: 540px;">
<ion-row style="height: 100%;">
<ion-grid>
<ion-row>
<ion-col class="ion-text-center">
<div style="padding-bottom: 32px;">

View File

@@ -1,6 +1,6 @@
<ion-content>
<ion-grid style="padding-top: 32px; height: 100%; max-width: 540px;">
<ion-row style="height: 100%;">
<ion-grid>
<ion-row>
<ion-col class="ion-text-center">
<div style="padding-bottom: 32px;">

View File

@@ -1,6 +1,7 @@
import { Component } from '@angular/core'
import { interval, Observable, Subscription } from 'rxjs'
import { delay, finalize, take, tap } from 'rxjs/operators'
import { interval, Subscription } from 'rxjs'
import { finalize, take, tap } from 'rxjs/operators'
import { ApiService } from 'src/app/services/api/api.service'
import { StateService } from 'src/app/services/state.service'
@Component({
@@ -9,14 +10,18 @@ import { StateService } from 'src/app/services/state.service'
styleUrls: ['init.page.scss'],
})
export class InitPage {
progress: number
progress = 0
sub: Subscription
constructor (
private readonly apiService: ApiService,
public readonly stateService: StateService,
) { }
ngOnInit () {
// call setup.complete to tear down embassy.local and spin up embassy-[id].local
this.apiService.setupComplete()
this.sub = interval(130)
.pipe(
take(101),

View File

@@ -1,6 +1,6 @@
<ion-content color="light">
<ion-grid style="padding-top: 32px; height: 100%; max-width: 540px;">
<ion-row style="height: 100%;">
<ion-grid>
<ion-row>
<ion-col class="ion-text-center">
<div style="padding-bottom: 32px;">

View File

@@ -15,10 +15,10 @@ export class LoadingPage {
ngOnInit () {
this.stateService.pollDataTransferProgress()
const progSub = this.stateService.dataProgSubject.subscribe(async progress => {
if (progress === 1) {
const progSub = this.stateService.dataCompletionSubject.subscribe(async complete => {
if (complete) {
progSub.unsubscribe()
await this.navCtrl.navigateForward(`/success`)
await this.navCtrl.navigateForward(`/init`)
}
})
}

View File

@@ -1,16 +0,0 @@
import { NgModule } from '@angular/core'
import { RouterModule, Routes } from '@angular/router'
import { PasswordPage } from './password.page'
const routes: Routes = [
{
path: '',
component: PasswordPage,
},
]
@NgModule({
imports: [RouterModule.forChild(routes)],
exports: [RouterModule],
})
export class PasswordPageRoutingModule { }

View File

@@ -1,16 +0,0 @@
import { NgModule } from '@angular/core'
import { RouterModule, Routes } from '@angular/router'
import { ProdKeyModal } from './prod-key-modal.page'
const routes: Routes = [
{
path: '',
component: ProdKeyModal,
},
]
@NgModule({
imports: [RouterModule.forChild(routes)],
exports: [RouterModule],
})
export class ProdKeyModalRoutingModule { }

View File

@@ -3,7 +3,7 @@ import { CommonModule } from '@angular/common'
import { IonicModule } from '@ionic/angular'
import { FormsModule } from '@angular/forms'
import { ProductKeyPage } from './product-key.page'
import { PasswordPageModule } from '../password/password.module'
import { PasswordPageModule } from '../../modals/password/password.module'
import { ProductKeyPageRoutingModule } from './product-key-routing.module'
@NgModule({

View File

@@ -1,6 +1,6 @@
<ion-content>
<ion-grid style="padding-top: 32px; height: 100%; max-width: 540px;">
<ion-row style="height: 100%;">
<ion-grid>
<ion-row>
<ion-col class="ion-text-center">
<div style="padding-bottom: 32px;">
@@ -8,14 +8,14 @@
</div>
<ion-card color="dark">
<ion-card-header class="ion-text-center" style="padding-bottom: 8px;">
<ion-card-header style="padding-bottom: 8px;">
<ion-card-title>Enter Product Key</ion-card-title>
</ion-card-header>
<ion-card-content class="ion-margin">
<form (submit)="submit()" style="margin-bottom: 12px;">
<ion-item-group class="ion-padding-bottom">
<p class="input-label">Product Key</p>
<p class="ion-text-left">Product Key</p>
<ion-item color="dark">
<ion-icon slot="start" name="key-outline" style="margin-right: 16px;"></ion-icon>
<ion-input

View File

@@ -0,0 +1,5 @@
ion-item {
--border-style: solid;
--border-width: 1px;
--border-color: var(--ion-color-medium);
}

View File

@@ -0,0 +1,10 @@
<div class="inline">
<h2 *ngIf="hasValidBackup">
<ion-icon name="cloud-done" color="success"></ion-icon>
Embassy backup detected
</h2>
<h2 *ngIf="!hasValidBackup">
<ion-icon name="cloud-offline" color="danger"></ion-icon>
No Embassy backup
</h2>
</div>

View File

@@ -2,14 +2,15 @@ import { NgModule } from '@angular/core'
import { CommonModule } from '@angular/common'
import { IonicModule } from '@ionic/angular'
import { FormsModule } from '@angular/forms'
import { RecoverPage } from './recover.page'
import { PasswordPageModule } from '../password/password.module'
import { ProdKeyModalModule } from '../prod-key-modal/prod-key-modal.module'
import { DriveStatusComponent, RecoverPage } from './recover.page'
import { PasswordPageModule } from '../../modals/password/password.module'
import { ProdKeyModalModule } from '../../modals/prod-key-modal/prod-key-modal.module'
import { RecoverPageRoutingModule } from './recover-routing.module'
import { PipesModule } from 'src/app/pipes/pipe.module'
import { CifsModalModule } from 'src/app/modals/cifs-modal/cifs-modal.module'
@NgModule({
declarations: [RecoverPage, DriveStatusComponent],
imports: [
CommonModule,
FormsModule,
@@ -18,7 +19,7 @@ import { PipesModule } from 'src/app/pipes/pipe.module'
PasswordPageModule,
ProdKeyModalModule,
PipesModule,
CifsModalModule,
],
declarations: [RecoverPage],
})
export class RecoverPageModule { }

View File

@@ -1,6 +1,6 @@
<ion-content>
<ion-grid style="padding-top: 32px; height: 100%; max-width: 540px;">
<ion-row style="height: 100%;">
<ion-grid>
<ion-row>
<ion-col>
<div style="padding-bottom: 32px;" class="ion-text-center">
@@ -9,66 +9,53 @@
<ion-card color="dark">
<ion-card-header class="ion-text-center">
<ion-card-title>Select Recovery Drive</ion-card-title>
<ion-card-subtitle>Select the drive containing the Embassy you want to recover.</ion-card-subtitle>
<ion-card-title>Restore from Backup</ion-card-title>
<ion-card-subtitle>Select the shared folder or physical drive containing your Embassy backup</ion-card-subtitle>
</ion-card-header>
<ion-card-content class="ion-margin">
<ng-container *ngIf="!loading && !drives.length">
<h2>No recovery drives found</h2>
<p>Please connect a recovery drive to your Embassy and refresh the page.</p>
<ion-button
(click)="refresh()"
style="text-align:center"
class="claim-button"
>
Refresh
</ion-button>
</ng-container>
<ion-spinner *ngIf="loading" class="center-spinner" name="lines"></ion-spinner>
<ion-item-group>
<ng-container *ngIf="loading">
<ion-skeleton-text animated class="skeleton-header"></ion-skeleton-text>
<ion-item color="light" style="padding-bottom: 10px;">
<ion-avatar slot="start" style="margin-right: 24px;">
<ion-skeleton-text animated style="width: 30px; height: 30px; border-radius: 0;"></ion-skeleton-text>
</ion-avatar>
<!-- loaded -->
<ion-item-group *ngIf="!loading">
<!-- cifs -->
<h2 class="target-label">
Shared Network Folder
</h2>
<p class="ion-padding-bottom">
Using a shared folder is the recommended way to recover from backup, since it works with all Embassy hardware configurations.
To restore from a shared folder, please follow the <a href="https://docs.start9.com/user-manual/general/backups.html" target="_blank" rel="noreferrer">instructions</a>.
</p>
<!-- connect -->
<ion-item button lines="none" (click)="presentModalCifs()">
<ion-icon slot="start" name="folder-open-outline" size="large" color="light"></ion-icon>
<ion-label>Open Shared Folder</ion-label>
</ion-item>
<br />
<br />
<!-- drives -->
<h2 class="target-label">
Physical Drives
</h2>
<p class="ion-padding-bottom">
Warning! Plugging in more than one physical drive to Embassy can lead to power failure and data corruption.
To restore from a physical drive, please follow the <a href="https://docs.start9.com/user-manual/general/backups.html" target="_blank" rel="noreferrer">instructions</a>.
</p>
<ng-container *ngFor="let target of driveTargets">
<ion-item button *ngIf="target.drive as drive" [disabled]="!driveClickable(drive)" (click)="select(drive)">
<ion-icon slot="start" name="save-outline" size="large" color="light"></ion-icon>
<ion-label>
<ion-skeleton-text animated style="width: 100px; height: 20px; margin-bottom: 12px;"></ion-skeleton-text>
<ion-skeleton-text animated style="width: 50px; height: 16px; margin-bottom: 16px;"></ion-skeleton-text>
<ion-skeleton-text animated style="width: 100px;"></ion-skeleton-text>
<h1>{{ drive.label || drive.logicalname }}</h1>
<drive-status [hasValidBackup]="target.hasValidBackup"></drive-status>
<p>{{ drive.vendor || 'Unknown Vendor' }} - {{ drive.model || 'Unknown Model' }}</p>
<p>Capacity: {{ drive.capacity | convertBytes }}</p>
</ion-label>
</ion-item>
</ng-container>
<!-- loaded -->
<div *ngFor="let drive of drives" class="ion-padding-bottom">
<h2 class="drive-label">
{{ drive.vendor || 'Unknown Vendor' }} - {{ drive.model || 'Unknown Model' }} - {{ drive.capacity | convertBytes }}
</h2>
<ion-item lines="none" button *ngFor="let partition of drive.partitions" [disabled]="!partitionClickable(partition)" (click)="choosePartition(partition)">
<ion-icon slot="start" name="save-outline"></ion-icon>
<ion-label>
<h1>{{ partition.label || partition.logicalname }}</h1>
<h2>{{ partition.capacity | convertBytes }}</h2>
<p *ngIf="partitionClickable(partition)">
<ion-text color="success">
Embassy backup detected
</ion-text>
</p>
<p *ngIf="!partitionClickable(partition)">
<ion-text>
No Embassy backup detected
</ion-text>
</p>
</ion-label>
<div *ngIf="partition['embassy-os'] && partition['embassy-os'].full">
<ion-icon *ngIf="partition['embassy-os'].version.startsWith('0.2')" color="success" slot="end" name="lock-open-outline" size="large"></ion-icon>
<ion-icon *ngIf="!partition['embassy-os'].version.startsWith('0.2')" color="danger" slot="end" name="lock-closed-outline" size="large"></ion-icon>
</div>
</ion-item>
</div>
</ion-item-group>
</ion-card-content>
</ion-card>

View File

@@ -1,20 +1,4 @@
.selected {
border: 4px solid var(--ion-color-secondary);
box-shadow: 4px 4px 16px var(--ion-color-light);
}
.drive-label {
.target-label {
font-weight: bold;
padding-bottom: 6px;
}
.skeleton-header {
width: 180px;
height: 18px;
--ion-text-color-rgb: var(--ion-color-light-rgb);
margin-bottom: 6px;
}
ion-item {
padding-bottom: 6px;
}

View File

@@ -1,10 +1,12 @@
import { Component } from '@angular/core'
import { Component, Input } from '@angular/core'
import { AlertController, LoadingController, ModalController, NavController } from '@ionic/angular'
import { ApiService, DiskInfo, PartitionInfo } from 'src/app/services/api/api.service'
import { CifsModal } from 'src/app/modals/cifs-modal/cifs-modal.page'
import { ApiService, CifsBackupTarget, DiskBackupTarget, DiskRecoverySource, RecoverySource } from 'src/app/services/api/api.service'
import { ErrorToastService } from 'src/app/services/error-toast.service'
import { StateService } from 'src/app/services/state.service'
import { PasswordPage } from '../password/password.page'
import { ProdKeyModal } from '../prod-key-modal/prod-key-modal.page'
import { MappedDisk } from 'src/app/util/misc.util'
import { PasswordPage } from '../../modals/password/password.page'
import { ProdKeyModal } from '../../modals/prod-key-modal/prod-key-modal.page'
@Component({
selector: 'app-recover',
@@ -12,14 +14,14 @@ import { ProdKeyModal } from '../prod-key-modal/prod-key-modal.page'
styleUrls: ['recover.page.scss'],
})
export class RecoverPage {
selectedPartition: PartitionInfo = null
loading = true
drives: DiskInfo[] = []
driveTargets: MappedDisk[] = []
hasShownGuidAlert = false
constructor (
private readonly apiService: ApiService,
private readonly navCtrl: NavController,
private readonly modalCtrl: ModalController,
private readonly modalController: ModalController,
private readonly alertCtrl: AlertController,
private readonly loadingCtrl: LoadingController,
@@ -32,25 +34,43 @@ export class RecoverPage {
}
async refresh () {
this.selectedPartition = null
this.loading = true
await this.getDrives()
}
partitionClickable (partition: PartitionInfo) {
return partition['embassy-os']?.full && (this.stateService.hasProductKey || this.is02x(partition))
driveClickable (drive: DiskBackupTarget) {
return drive['embassy-os']?.full && (this.stateService.hasProductKey || this.is02x(drive))
}
async getDrives () {
this.driveTargets = []
try {
const drives = await this.apiService.getDrives()
this.drives = drives.filter(d => d.partitions.length)
drives.filter(d => d.partitions.length).forEach(d => {
d.partitions.forEach(p => {
this.driveTargets.push(
{
hasValidBackup: p['embassy-os']?.full,
drive: {
type: 'disk',
vendor: d.vendor,
model: d.model,
logicalname: p.logicalname,
label: p.label,
capacity: p.capacity,
used: p.used,
'embassy-os': p['embassy-os'],
},
},
)
})
})
const importableDrive = drives.find(d => !!d.guid)
if (!!importableDrive && !this.hasShownGuidAlert) {
const alert = await this.alertCtrl.create({
header: 'Embassy Drive Detected',
message: 'A valid EmbassyOS data drive has been detected. To use this drive in its current state, simply click "Use Drive" below.',
message: 'A valid EmbassyOS data drive has been detected. To use this drive as-is, simply click "Use Drive" below.',
buttons: [
{
role: 'cancel',
@@ -74,14 +94,69 @@ export class RecoverPage {
}
}
async importDrive (guid: string) {
async presentModalCifs (): Promise<void> {
const modal = await this.modalCtrl.create({
component: CifsModal,
})
modal.onDidDismiss().then(res => {
if (res.role === 'success') {
const { hostname, path, username, password } = res.data.cifs
this.stateService.recoverySource = {
type: 'cifs',
hostname,
path,
username,
password,
}
this.stateService.recoveryPassword = res.data.recoveryPassword
this.navCtrl.navigateForward('/embassy')
}
})
await modal.present()
}
async select (target: DiskBackupTarget) {
if (target['embassy-os'].version.startsWith('0.2')) {
return this.selectRecoverySource(target.logicalname)
}
if (this.stateService.hasProductKey) {
const modal = await this.modalController.create({
component: PasswordPage,
componentProps: { target },
cssClass: 'alertlike-modal',
})
modal.onDidDismiss().then(res => {
if (res.data && res.data.password) {
this.selectRecoverySource(target.logicalname, res.data.password)
}
})
await modal.present()
// if no product key, it means they are an upgrade kit user
} else {
const modal = await this.modalController.create({
component: ProdKeyModal,
componentProps: { target },
cssClass: 'alertlike-modal',
})
modal.onDidDismiss().then(res => {
if (res.data && res.data.productKey) {
this.selectRecoverySource(target.logicalname)
}
})
await modal.present()
}
}
private async importDrive (guid: string) {
const loader = await this.loadingCtrl.create({
message: 'Importing Drive',
})
await loader.present()
try {
await this.stateService.importDrive(guid)
await this.navCtrl.navigateForward(`/success`)
await this.navCtrl.navigateForward(`/init`)
} catch (e) {
this.errorToastService.present(`${e.message}: ${e.data}`)
} finally {
@@ -89,60 +164,26 @@ export class RecoverPage {
}
}
async choosePartition (partition: PartitionInfo) {
this.selectedPartition = partition
if (partition['embassy-os'].version.startsWith('0.2')) {
return this.selectRecoveryPartition()
}
if (this.stateService.hasProductKey) {
const modal = await this.modalController.create({
component: PasswordPage,
componentProps: {
recoveryPartition: this.selectedPartition,
},
cssClass: 'alertlike-modal',
})
modal.onDidDismiss().then(async ret => {
if (!ret.data) {
this.selectedPartition = null
} else if (ret.data.password) {
this.selectRecoveryPartition(ret.data.password)
}
})
await modal.present()
// if no product key, it means they are an upgrade kit user
} else {
const modal = await this.modalController.create({
component: ProdKeyModal,
componentProps: {
recoveryPartition: this.selectedPartition,
},
cssClass: 'alertlike-modal',
})
modal.onDidDismiss().then(async ret => {
if (!ret.data) {
this.selectedPartition = null
} else if (ret.data.productKey) {
this.selectRecoveryPartition()
}
})
await modal.present()
private async selectRecoverySource (logicalname: string, password?: string) {
this.stateService.recoverySource = {
type: 'disk',
logicalname,
}
this.stateService.recoveryPassword = password
this.navCtrl.navigateForward(`/embassy`)
}
async selectRecoveryPartition (password?: string) {
this.stateService.recoveryPartition = this.selectedPartition
if (password) {
this.stateService.recoveryPassword = password
}
await this.navCtrl.navigateForward(`/embassy`)
}
private is02x (partition: PartitionInfo): boolean {
return !this.stateService.hasProductKey && partition['embassy-os']?.version.startsWith('0.2')
private is02x (drive: DiskBackupTarget): boolean {
return !this.stateService.hasProductKey && drive['embassy-os']?.version.startsWith('0.2')
}
}
@Component({
selector: 'drive-status',
templateUrl: './drive-status.component.html',
styleUrls: ['./recover.page.scss'],
})
export class DriveStatusComponent {
@Input() hasValidBackup: boolean
}

View File

@@ -1,16 +0,0 @@
import { NgModule } from '@angular/core'
import { RouterModule, Routes } from '@angular/router'
import { SuccessPage } from './success.page'
const routes: Routes = [
{
path: '',
component: SuccessPage,
},
]
@NgModule({
imports: [RouterModule.forChild(routes)],
exports: [RouterModule],
})
export class SuccessPageRoutingModule { }

View File

@@ -3,17 +3,13 @@ import { CommonModule } from '@angular/common'
import { IonicModule } from '@ionic/angular'
import { FormsModule } from '@angular/forms'
import { SuccessPage } from './success.page'
import { PasswordPageModule } from '../password/password.module'
import { SuccessPageRoutingModule } from './success-routing.module'
import { PasswordPageModule } from '../../modals/password/password.module'
@NgModule({
imports: [
CommonModule,
FormsModule,
IonicModule,
SuccessPageRoutingModule,
PasswordPageModule,
],
declarations: [SuccessPage],

View File

@@ -31,7 +31,7 @@
For a list of recommended browsers, click <a href="https://docs.start9.com/user-manual/connecting.html" target="_blank" rel="noreferrer"><b>here</b></a>.
</p>
<br />
<p class="input-label">Tor Address</p>
<p>Tor Address</p>
<ion-item lines="none" color="dark">
<ion-label class="ion-text-wrap">
<code><ion-text color="light">{{ stateService.torAddress }}</ion-text></code>
@@ -81,7 +81,7 @@
<ion-icon slot="end" name="download-outline"></ion-icon>
</ion-button>
<p class="input-label">LAN Address</p>
<p>LAN Address</p>
<ion-item lines="none" color="dark">
<ion-label class="ion-text-wrap">
<code><ion-text color="light">{{ stateService.lanAddress }}</ion-text></code>

View File

@@ -6,10 +6,11 @@ export abstract class ApiService {
abstract getRecoveryStatus (): Promise<RecoveryStatusRes> // setup.recovery.status
// encrypted
abstract verifyCifs (cifs: VerifyCifs): Promise<EmbassyOSRecoveryInfo> // setup.cifs.verify
abstract verifyProductKey (): Promise<void> // echo - throws error if invalid
abstract verify03XPassword (logicalname: string, password: string): Promise<boolean> // setup.recovery.test-password
abstract importDrive (guid: string): Promise<SetupEmbassyRes> // setup.execute
abstract setupEmbassy (setupInfo: SetupEmbassyReq): Promise<SetupEmbassyRes> // setup.execute
abstract setupComplete (): Promise<void> // setup.complete
}
export interface GetStatusRes {
@@ -17,11 +18,13 @@ export interface GetStatusRes {
migrating: boolean
}
export type VerifyCifs = Omit<CifsRecoverySource, 'type'>
export interface SetupEmbassyReq {
'embassy-logicalname': string
'embassy-password': string
'recovery-partition'?: PartitionInfo
'recovery-password'?: string
'recovery-source': RecoverySource | null
'recovery-password': string | null
}
export interface SetupEmbassyRes {
@@ -30,6 +33,50 @@ export interface SetupEmbassyRes {
'root-ca': string
}
export type BackupTarget = DiskBackupTarget | CifsBackupTarget
export interface EmbassyOSRecoveryInfo {
version: string
full: boolean
'password-hash': string | null
'wrapped-key': string | null
}
export interface DiskBackupTarget {
type: 'disk'
vendor: string | null
model: string | null
logicalname: string | null
label: string | null
capacity: number
used: number | null
'embassy-os': EmbassyOSRecoveryInfo | null
}
export interface CifsBackupTarget {
type: 'cifs'
hostname: string
path: string
username: string
mountable: boolean
'embassy-os': EmbassyOSRecoveryInfo | null
}
export type RecoverySource = DiskRecoverySource | CifsRecoverySource
export interface DiskRecoverySource {
type: 'disk'
logicalname: string // partition logicalname
}
export interface CifsRecoverySource {
type: 'cifs'
hostname: string
path: string
username: string
password: string | null
}
export interface DiskInfo {
logicalname: string,
vendor: string | null,
@@ -42,6 +89,7 @@ export interface DiskInfo {
export interface RecoveryStatusRes {
'bytes-transferred': number
'total-bytes': number
complete: boolean
}
export interface PartitionInfo {
@@ -49,11 +97,5 @@ export interface PartitionInfo {
label: string | null,
capacity: number,
used: number | null,
'embassy-os': EmbassyOsRecoveryInfo | null,
}
export interface EmbassyOsRecoveryInfo {
version: string,
full: boolean, // contains full embassy backup
'password-hash': string | null, // null for 0.2.x
'embassy-os': EmbassyOSRecoveryInfo | null,
}

View File

@@ -151,7 +151,6 @@ export enum Method {
export interface RPCOptions {
method: string
// @TODO what are valid params? object, bool?
params?: {
[param: string]: string | number | boolean | object | string[] | number[];
}

View File

@@ -1,5 +1,5 @@
import { Injectable } from '@angular/core'
import { ApiService, DiskInfo, GetStatusRes, RecoveryStatusRes, SetupEmbassyReq, SetupEmbassyRes } from './api.service'
import { ApiService, DiskInfo, EmbassyOSRecoveryInfo, GetStatusRes, RecoverySource, RecoveryStatusRes, SetupEmbassyReq, SetupEmbassyRes, VerifyCifs } from './api.service'
import { HttpService } from './http.service'
@Injectable({
@@ -43,6 +43,14 @@ export class LiveApiService extends ApiService {
// ** ENCRYPTED **
async verifyCifs (params: VerifyCifs) {
params.path = params.path.replace('/\\/g', '/')
return this.http.rpcRequest<EmbassyOSRecoveryInfo>({
method: 'setup.cifs.verify',
params,
})
}
async verifyProductKey () {
return this.http.rpcRequest<void>({
method: 'echo',
@@ -50,13 +58,6 @@ export class LiveApiService extends ApiService {
})
}
async verify03XPassword (logicalname: string, password: string) {
return this.http.rpcRequest<boolean>({
method: 'setup.recovery.test-password',
params: { logicalname, password },
})
}
async importDrive (guid: string) {
const res = await this.http.rpcRequest<SetupEmbassyRes>({
method: 'setup.attach',
@@ -70,6 +71,10 @@ export class LiveApiService extends ApiService {
}
async setupEmbassy (setupInfo: SetupEmbassyReq) {
if (setupInfo['recovery-source'].type === 'cifs') {
setupInfo['recovery-source'].path = setupInfo['recovery-source'].path.replace('/\\/g', '/')
}
const res = await this.http.rpcRequest<SetupEmbassyRes>({
method: 'setup.execute',
params: setupInfo as any,
@@ -80,4 +85,11 @@ export class LiveApiService extends ApiService {
'root-ca': btoa(res['root-ca']),
}
}
async setupComplete () {
await this.http.rpcRequest<SetupEmbassyRes>({
method: 'setup.complete',
params: { },
})
}
}

View File

@@ -1,6 +1,6 @@
import { Injectable } from '@angular/core'
import { pauseFor } from 'src/app/util/misc.util'
import { ApiService, SetupEmbassyReq } from './api.service'
import { ApiService, RecoverySource, SetupEmbassyReq, VerifyCifs } from './api.service'
let tries = 0
@@ -27,8 +27,8 @@ export class MockApiService extends ApiService {
await pauseFor(1000)
return [
{
vendor: 'Vendor',
model: 'Model',
vendor: 'Samsung',
model: 'SATA',
logicalname: '/dev/sda',
guid: 'theguid',
partitions: [
@@ -50,16 +50,16 @@ export class MockApiService extends ApiService {
capacity: 150000,
},
{
vendor: 'Vendor',
model: 'Model',
vendor: 'Samsung',
model: null,
logicalname: 'dev/sdb',
partitions: [],
capacity: 34359738369,
guid: null,
},
{
vendor: 'Vendor',
model: 'Model',
vendor: 'Crucial',
model: 'MX500',
logicalname: 'dev/sdc',
guid: null,
partitions: [
@@ -72,6 +72,7 @@ export class MockApiService extends ApiService {
version: '0.3.3',
full: true,
'password-hash': 'asdfasdfasdf',
'wrapped-key': '',
},
},
{
@@ -84,6 +85,7 @@ export class MockApiService extends ApiService {
full: true,
// password is 'asdfasdf'
'password-hash': '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ',
'wrapped-key': '',
},
},
{
@@ -95,14 +97,15 @@ export class MockApiService extends ApiService {
version: '0.3.3',
full: false,
'password-hash': 'asdfasdfasdf',
'wrapped-key': '',
},
},
],
capacity: 100000,
},
{
vendor: 'Vendor',
model: 'Model',
vendor: 'Sandisk',
model: null,
logicalname: '/dev/sdd',
guid: null,
partitions: [
@@ -115,6 +118,7 @@ export class MockApiService extends ApiService {
version: '0.2.7',
full: true,
'password-hash': 'asdfasdfasdf',
'wrapped-key': '',
},
},
],
@@ -133,21 +137,27 @@ export class MockApiService extends ApiService {
return {
'bytes-transferred': tries,
'total-bytes': 4,
complete: tries === 4
}
}
// ** ENCRYPTED **
async verifyCifs (params: VerifyCifs) {
await pauseFor(1000)
return {
version: '0.3.0',
full: true,
'password-hash': '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ',
'wrapped-key': '',
}
}
async verifyProductKey () {
await pauseFor(1000)
return
}
async verify03XPassword (logicalname: string, password: string) {
await pauseFor(2000)
return password.length > 8
}
async importDrive (guid: string) {
await pauseFor(3000)
return setupRes
@@ -158,25 +168,13 @@ export class MockApiService extends ApiService {
return setupRes
}
async getRecoveryDrives () {
await pauseFor(2000)
return [
{
logicalname: 'Name1',
version: '0.3.3',
name: 'My Embassy',
},
{
logicalname: 'Name2',
version: '0.2.7',
name: 'My Embassy',
},
]
async setupComplete () {
await pauseFor(1000)
}
}
const rootCA =
`-----BEGIN CERTIFICATE-----
`-----BEGIN CERTIFICATE-----
MIIDpzCCAo+gAwIBAgIRAIIuOarlQETlUQEOZJGZYdIwDQYJKoZIhvcNAQELBQAw
bTELMAkGA1UEBhMCVVMxFTATBgNVBAoMDEV4YW1wbGUgQ29ycDEOMAwGA1UECwwF
U2FsZXMxCzAJBgNVBAgMAldBMRgwFgYDVQQDDA93d3cuZXhhbXBsZS5jb20xEDAO

View File

@@ -1,6 +1,6 @@
import { Injectable } from '@angular/core'
import { BehaviorSubject } from 'rxjs'
import { ApiService, DiskInfo, PartitionInfo } from './api/api.service'
import { ApiService, RecoverySource } from './api/api.service'
import { ErrorToastService } from './error-toast.service'
import { pauseFor } from '../util/misc.util'
@@ -14,13 +14,12 @@ export class StateService {
polling = false
embassyLoaded = false
storageDrive: DiskInfo
embassyPassword: string
recoveryPartition: PartitionInfo
recoverySource: RecoverySource
recoveryPassword: string
dataTransferProgress: { bytesTransferred: number; totalBytes: number } | null
dataTransferProgress: { bytesTransferred: number, totalBytes: number, complete: boolean } | null
dataProgress = 0
dataProgSubject = new BehaviorSubject(this.dataProgress)
dataCompletionSubject = new BehaviorSubject(false)
torAddress: string
lanAddress: string
@@ -33,47 +32,48 @@ export class StateService {
async pollDataTransferProgress () {
this.polling = true
await pauseFor(1000)
await pauseFor(500)
if (
this.dataTransferProgress?.totalBytes &&
this.dataTransferProgress.bytesTransferred === this.dataTransferProgress.totalBytes
) return
this.dataTransferProgress?.complete
) {
this.dataCompletionSubject.next(true)
return
}
let progress
try {
progress = await this.apiService.getRecoveryStatus()
} catch (e) {
this.errorToastService.present(`${e.message}: ${e.details}`)
this.errorToastService.present(`${e.message}: ${e.details}.\nRestart Embassy to try again.`)
}
if (progress) {
this.dataTransferProgress = {
bytesTransferred: progress['bytes-transferred'],
totalBytes: progress['total-bytes'],
complete: progress.complete,
}
if (this.dataTransferProgress.totalBytes) {
this.dataProgress = this.dataTransferProgress.bytesTransferred / this.dataTransferProgress.totalBytes
this.dataProgSubject.next(this.dataProgress)
}
}
this.pollDataTransferProgress()
setTimeout(() => this.pollDataTransferProgress(), 0) // prevent call stack from growing
}
async importDrive (guid: string) : Promise<void> {
async importDrive (guid: string): Promise<void> {
const ret = await this.apiService.importDrive(guid)
this.torAddress = ret['tor-address']
this.lanAddress = ret['lan-address']
this.cert = ret['root-ca']
}
async setupEmbassy () : Promise<void> {
async setupEmbassy (storageLogicalname: string, password: string): Promise<void> {
const ret = await this.apiService.setupEmbassy({
'embassy-logicalname': this.storageDrive.logicalname,
'embassy-password': this.embassyPassword,
'recovery-partition': this.recoveryPartition,
'recovery-password': this.recoveryPassword,
'embassy-logicalname': storageLogicalname,
'embassy-password': password,
'recovery-source': this.recoverySource || null,
'recovery-password': this.recoveryPassword || null,
})
this.torAddress = ret['tor-address']
this.lanAddress = ret['lan-address']

View File

@@ -1,3 +1,10 @@
import { DiskBackupTarget } from '../services/api/api.service'
export interface MappedDisk {
hasValidBackup: boolean
drive: DiskBackupTarget
}
export const pauseFor = (ms: number) => {
return new Promise(resolve => setTimeout(resolve, ms))
}

View File

@@ -25,8 +25,29 @@
@import "~@ionic/angular/css/text-transformation.css";
@import "~@ionic/angular/css/flex-utils.css";
ion-content {
--background: var(--ion-color-medium);
}
ion-grid {
padding-top: 32px;
height: 100%;
max-width: 600px;
}
ion-row {
height: 100%;
}
ion-item {
--color: var(--ion-color-light);
}
ion-toolbar {
--ion-background-color: var(--ion-color-light);
--ion-background-color: var(--ion-color-light);
ion-title {
color: var(--ion-color-dark);
}
}
ion-avatar {
@@ -34,24 +55,11 @@ ion-avatar {
height: 27px;
}
ion-alert {
.alert-button {
color: var(--ion-color-dark) !important;
}
}
ion-button {
--color: var(--ion-color-dark) !important;
}
ion-item {
--highlight-color-valid: transparent;
--highlight-color-invalid: transparent;
--border-radius: 4px;
--border-style: solid;
--border-width: 1px;
--border-color: var(--ion-color-light);
}
ion-card-title {
@@ -69,6 +77,11 @@ ion-toast {
--color: white;
}
.center-spinner {
height: 20vh;
width: 100%;
}
.inline {
* {
display: inline-block;
@@ -93,13 +106,6 @@ ion-toast {
top: 64px;
}
.input-label {
text-align: left;
padding-bottom: 2px;
font-size: small;
font-weight: bold;
}
.error-border {
border: 2px solid var(--ion-color-danger);
border-radius: 4px;
@@ -111,9 +117,8 @@ ion-toast {
}
.modal-wrapper.sc-ion-modal-md {
border-radius: 6px;
border: 2px solid rgba(255,255,255,.03);
box-shadow: 0 0 70px 70px black;
border-radius: 4px;
border: 1px solid rgba(255,255,255,.03);
}
.modal-wrapper {

Some files were not shown because too many files have changed in this diff Show More