feat: Exposing the rsync that we have to the js (#1907)

chore: Make the copy be done by checksum.

chore: Remove the logging at the start

chore: use the defaults of the original.

chore: Convert the error into just the source.

chore: Remove some of the unwraps
This commit is contained in:
J M
2022-11-09 12:19:08 -07:00
committed by Aiden McClelland
parent 0e82b6981f
commit 67b54ac1eb
32 changed files with 2001 additions and 439 deletions

View File

@@ -221,6 +221,7 @@ jobs:
sh -c '
apt-get update &&
apt-get install -y ca-certificates &&
apt-get install -y rsync &&
cd /home/rust/src &&
mkdir -p ~/.cargo/bin &&
tar -zxvf nextest-aarch64.tar.gz -C ${CARGO_HOME:-~/.cargo}/bin &&

View File

@@ -112,7 +112,7 @@ product_key.txt:
if [ "$(KEY)" != "" ]; then $(shell which echo) -n "$(KEY)" > product_key.txt; fi
echo >> product_key.txt
snapshots: libs/snapshot-creator/Cargo.toml
snapshots: libs/snapshot_creator/Cargo.toml
cd libs/ && ./build-v8-snapshot.sh
cd libs/ && ./build-arm-v8-snapshot.sh

13
backend/Cargo.lock generated
View File

@@ -1746,7 +1746,10 @@ dependencies = [
"futures",
"models",
"pin-project",
"serde",
"tokio",
"tokio-stream",
"tracing",
]
[[package]]
@@ -2452,13 +2455,23 @@ dependencies = [
name = "models"
version = "0.1.0"
dependencies = [
"bollard",
"color-eyre",
"ed25519-dalek",
"embassy_container_init",
"emver",
"mbrman",
"openssl",
"patch-db",
"rand 0.8.5",
"rpc-toolkit",
"serde",
"serde_json",
"sqlx",
"thiserror",
"tokio",
"torut",
"tracing",
]
[[package]]

View File

@@ -76,7 +76,7 @@ fd-lock-rs = "0.1.4"
futures = "0.3.21"
git-version = "0.3.5"
helpers = { path = "../libs/helpers" }
embassy_container_init = { path = "../libs/embassy-container-init" }
embassy_container_init = { path = "../libs/embassy_container_init" }
hex = "0.4.3"
hmac = "0.12.1"
http = "0.2.8"

View File

@@ -1,295 +1,5 @@
use std::fmt::Display;
use color_eyre::eyre::eyre;
use http::uri::InvalidUri;
use models::InvalidId;
use patch_db::Revision;
use rpc_toolkit::yajrc::RpcError;
/// Stable numeric category codes for every error the backend can report.
/// The discriminant (`kind as i32`) is sent to clients as the JSON-RPC error
/// `code`, so existing values must never be renumbered — retired variants are
/// commented out (see `Nginx = 16`) rather than removed.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ErrorKind {
    Unknown = 1,
    Filesystem = 2,
    Docker = 3,
    ConfigSpecViolation = 4,
    ConfigRulesViolation = 5,
    NotFound = 6,
    IncorrectPassword = 7,
    VersionIncompatible = 8,
    Network = 9,
    Registry = 10,
    Serialization = 11,
    Deserialization = 12,
    Utf8 = 13,
    ParseVersion = 14,
    IncorrectDisk = 15,
    // Nginx = 16,
    Dependency = 17,
    ParseS9pk = 18,
    ParseUrl = 19,
    DiskNotAvailable = 20,
    BlockDevice = 21,
    InvalidOnionAddress = 22,
    Pack = 23,
    ValidateS9pk = 24,
    DiskCorrupted = 25, // Remove
    Tor = 26,
    ConfigGen = 27,
    ParseNumber = 28,
    Database = 29,
    InvalidPackageId = 30,
    InvalidSignature = 31,
    Backup = 32,
    Restore = 33,
    Authorization = 34,
    AutoConfigure = 35,
    Action = 36,
    RateLimited = 37,
    InvalidRequest = 38,
    MigrationFailed = 39,
    Uninitialized = 40,
    ParseNetAddress = 41,
    ParseSshKey = 42,
    SoundError = 43,
    ParseTimestamp = 44,
    ParseSysInfo = 45,
    Wifi = 46,
    Journald = 47,
    DiskManagement = 48,
    OpenSsl = 49,
    PasswordHashGeneration = 50,
    DiagnosticMode = 51,
    ParseDbField = 52,
    Duplicate = 53,
    MultipleErrors = 54,
    Incoherent = 55,
    InvalidBackupTargetId = 56,
    ProductKeyMismatch = 57,
    LanPortConflict = 58,
    Javascript = 59,
    Pem = 60,
    TLSInit = 61,
    HttpRange = 62,
    ContentLength = 63,
    BytesError = 64,
    InvalidIP = 65,
    JoinError = 66,
    AsciiError = 67,
    NoHost = 68,
    SignError = 69,
}
impl ErrorKind {
    /// Human-readable name for this error category. Used as the JSON-RPC
    /// error `message` and by the `Display` impls below; one arm per variant
    /// so adding a variant without a label is a compile error.
    pub fn as_str(&self) -> &'static str {
        use ErrorKind::*;
        match self {
            Unknown => "Unknown Error",
            Filesystem => "Filesystem I/O Error",
            Docker => "Docker Error",
            ConfigSpecViolation => "Config Spec Violation",
            ConfigRulesViolation => "Config Rules Violation",
            NotFound => "Not Found",
            IncorrectPassword => "Incorrect Password",
            VersionIncompatible => "Version Incompatible",
            Network => "Network Error",
            Registry => "Registry Error",
            Serialization => "Serialization Error",
            Deserialization => "Deserialization Error",
            Utf8 => "UTF-8 Parse Error",
            ParseVersion => "Version Parsing Error",
            IncorrectDisk => "Incorrect Disk",
            // Nginx => "Nginx Error",
            Dependency => "Dependency Error",
            ParseS9pk => "S9PK Parsing Error",
            ParseUrl => "URL Parsing Error",
            DiskNotAvailable => "Disk Not Available",
            BlockDevice => "Block Device Error",
            InvalidOnionAddress => "Invalid Onion Address",
            Pack => "Pack Error",
            ValidateS9pk => "S9PK Validation Error",
            DiskCorrupted => "Disk Corrupted", // Remove
            Tor => "Tor Daemon Error",
            ConfigGen => "Config Generation Error",
            ParseNumber => "Number Parsing Error",
            Database => "Database Error",
            InvalidPackageId => "Invalid Package ID",
            InvalidSignature => "Invalid Signature",
            Backup => "Backup Error",
            Restore => "Restore Error",
            Authorization => "Unauthorized",
            AutoConfigure => "Auto-Configure Error",
            Action => "Action Failed",
            RateLimited => "Rate Limited",
            InvalidRequest => "Invalid Request",
            MigrationFailed => "Migration Failed",
            Uninitialized => "Uninitialized",
            ParseNetAddress => "Net Address Parsing Error",
            ParseSshKey => "SSH Key Parsing Error",
            SoundError => "Sound Interface Error",
            ParseTimestamp => "Timestamp Parsing Error",
            ParseSysInfo => "System Info Parsing Error",
            Wifi => "WiFi Internal Error",
            Journald => "Journald Error",
            DiskManagement => "Disk Management Error",
            OpenSsl => "OpenSSL Internal Error",
            PasswordHashGeneration => "Password Hash Generation Error",
            DiagnosticMode => "Embassy is in Diagnostic Mode",
            ParseDbField => "Database Field Parse Error",
            Duplicate => "Duplication Error",
            MultipleErrors => "Multiple Errors",
            Incoherent => "Incoherent",
            InvalidBackupTargetId => "Invalid Backup Target ID",
            ProductKeyMismatch => "Incompatible Product Keys",
            LanPortConflict => "Incompatible LAN Port Configuration",
            Javascript => "Javascript Engine Error",
            Pem => "PEM Encoding Error",
            TLSInit => "TLS Backend Initialize Error",
            HttpRange => "No Support for Web Server HTTP Ranges",
            ContentLength => "Request has no content length header",
            BytesError => "Could not get the bytes for this request",
            InvalidIP => "Could not parse this IP address",
            JoinError => "Join Handle Error",
            AsciiError => "Could not parse ascii text",
            NoHost => "No Host header ",
            SignError => "Signing error",
        }
    }
}
/// Formats an `ErrorKind` as its human-readable category name, delegating to
/// [`ErrorKind::as_str`].
impl Display for ErrorKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
/// The backend's primary error type: an underlying `eyre` report tagged with
/// a stable [`ErrorKind`] category and, optionally, the patch-db revision
/// that was current when the error occurred.
#[derive(Debug)]
pub struct Error {
    pub source: color_eyre::eyre::Error,
    pub kind: ErrorKind,
    pub revision: Option<Revision>,
}
/// Renders an `Error` as `"<category name>: <source>"`, matching the shape
/// clients see in RPC error details.
impl Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.kind.as_str())?;
        f.write_str(": ")?;
        write!(f, "{}", self.source)
    }
}
impl Error {
    /// Wraps any error-like `source` together with its [`ErrorKind`]
    /// category. `revision` starts out `None`; it is attached later by code
    /// that has a patch-db revision in hand.
    pub fn new<E: Into<color_eyre::eyre::Error>>(source: E, kind: ErrorKind) -> Self {
        Error {
            source: source.into(),
            kind,
            revision: None,
        }
    }
}
impl From<InvalidId> for Error {
fn from(err: InvalidId) -> Self {
Error::new(err, crate::error::ErrorKind::InvalidPackageId)
}
}
// Conversions from common library error types into `Error`, each tagging the
// source with the `ErrorKind` category it belongs to. A few types (torut,
// openssl, http's `InvalidUri`) are first rendered through `eyre!` —
// presumably because they don't satisfy the `Into<eyre::Error>` bound
// directly; confirm before changing.
impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Error::new(e, ErrorKind::Filesystem)
    }
}
impl From<std::str::Utf8Error> for Error {
    fn from(e: std::str::Utf8Error) -> Self {
        Error::new(e, ErrorKind::Utf8)
    }
}
impl From<std::string::FromUtf8Error> for Error {
    fn from(e: std::string::FromUtf8Error) -> Self {
        Error::new(e, ErrorKind::Utf8)
    }
}
impl From<emver::ParseError> for Error {
    fn from(e: emver::ParseError) -> Self {
        Error::new(e, ErrorKind::ParseVersion)
    }
}
impl From<rpc_toolkit::url::ParseError> for Error {
    fn from(e: rpc_toolkit::url::ParseError) -> Self {
        Error::new(e, ErrorKind::ParseUrl)
    }
}
impl From<std::num::ParseIntError> for Error {
    fn from(e: std::num::ParseIntError) -> Self {
        Error::new(e, ErrorKind::ParseNumber)
    }
}
impl From<std::num::ParseFloatError> for Error {
    fn from(e: std::num::ParseFloatError) -> Self {
        Error::new(e, ErrorKind::ParseNumber)
    }
}
impl From<patch_db::Error> for Error {
    fn from(e: patch_db::Error) -> Self {
        Error::new(e, ErrorKind::Database)
    }
}
impl From<sqlx::Error> for Error {
    fn from(e: sqlx::Error) -> Self {
        Error::new(e, ErrorKind::Database)
    }
}
impl From<ed25519_dalek::SignatureError> for Error {
    fn from(e: ed25519_dalek::SignatureError) -> Self {
        Error::new(e, ErrorKind::InvalidSignature)
    }
}
impl From<bollard::errors::Error> for Error {
    fn from(e: bollard::errors::Error) -> Self {
        Error::new(e, ErrorKind::Docker)
    }
}
impl From<torut::control::ConnError> for Error {
    fn from(e: torut::control::ConnError) -> Self {
        Error::new(eyre!("{:?}", e), ErrorKind::Tor)
    }
}
impl From<std::net::AddrParseError> for Error {
    fn from(e: std::net::AddrParseError) -> Self {
        Error::new(e, ErrorKind::ParseNetAddress)
    }
}
impl From<openssl::error::ErrorStack> for Error {
    fn from(e: openssl::error::ErrorStack) -> Self {
        Error::new(eyre!("{}", e), ErrorKind::OpenSsl)
    }
}
impl From<mbrman::Error> for Error {
    fn from(e: mbrman::Error) -> Self {
        Error::new(e, ErrorKind::DiskManagement)
    }
}
impl From<InvalidUri> for Error {
    fn from(e: InvalidUri) -> Self {
        Error::new(eyre!("{}", e), ErrorKind::ParseUrl)
    }
}
// Converts an `Error` into the JSON-RPC error shape: the kind's discriminant
// becomes the `code`, its name the `message`, and a `data` object carries the
// Display/Debug renderings of the source plus the optional patch-db revision.
impl From<Error> for RpcError {
    fn from(e: Error) -> Self {
        let mut data_object = serde_json::Map::with_capacity(3);
        data_object.insert("details".to_owned(), format!("{}", e.source).into());
        data_object.insert("debug".to_owned(), format!("{:?}", e.source).into());
        data_object.insert(
            "revision".to_owned(),
            match serde_json::to_value(&e.revision) {
                Ok(a) => a,
                Err(e) => {
                    // A revision that fails to serialize is non-fatal: warn
                    // and send null rather than dropping the whole error.
                    tracing::warn!("Error serializing revision for Error object: {}", e);
                    serde_json::Value::Null
                }
            },
        );
        RpcError {
            code: e.kind as i32,
            message: e.kind.as_str().into(),
            data: Some(data_object.into()),
        }
    }
}
pub use models::{Error, ErrorKind, ResultExt};
/// Accumulates multiple `Error`s so several failures can be reported at once.
#[derive(Debug, Default)]
pub struct ErrorCollection(Vec<Error>);
@@ -340,46 +50,6 @@ impl std::fmt::Display for ErrorCollection {
}
}
/// Extension methods for converting any `Result<T, E>` into `Result<T, Error>`.
pub trait ResultExt<T, E>
where
    Self: Sized,
{
    /// Tags the error with `kind`, keeping the original error as the source.
    fn with_kind(self, kind: ErrorKind) -> Result<T, Error>;
    /// Like `with_kind`, but also derives a context message from the error
    /// itself via `f` and wraps the source with it.
    fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display + Send + Sync + 'static>(
        self,
        f: F,
    ) -> Result<T, Error>;
}
impl<T, E> ResultExt<T, E> for Result<T, E>
where
    color_eyre::eyre::Error: From<E>,
{
    // Simple case: convert the error and attach the category.
    fn with_kind(self, kind: ErrorKind) -> Result<T, Error> {
        self.map_err(|e| Error {
            source: e.into(),
            kind,
            revision: None,
        })
    }
    fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display + Send + Sync + 'static>(
        self,
        f: F,
    ) -> Result<T, Error> {
        self.map_err(|e| {
            // `f` is called on a borrow first, because converting `e` into an
            // eyre::Error consumes it.
            let (kind, ctx) = f(&e);
            let source = color_eyre::eyre::Error::from(e);
            // The context string deliberately includes the source's own
            // message, so the wrapped report reads "<ctx>: <source>".
            let ctx = format!("{}: {}", ctx, source);
            let source = source.wrap_err(ctx);
            Error {
                kind,
                source: source.into(),
                revision: None,
            }
        })
    }
}
#[macro_export]
macro_rules! ensure_code {
($x:expr, $c:expr, $fmt:expr $(, $arg:expr)*) => {

View File

@@ -2,6 +2,7 @@ use std::path::{Path, PathBuf};
use color_eyre::eyre::eyre;
use mbrman::{MBRPartitionEntry, CHS, MBR};
use models::Error;
use rpc_toolkit::command;
use serde::{Deserialize, Serialize};
use tokio::process::Command;
@@ -14,7 +15,6 @@ use crate::disk::mount::guard::{MountGuard, TmpMountGuard};
use crate::disk::OsPartitionInfo;
use crate::util::serde::IoFormat;
use crate::util::{display_none, Invoke};
use crate::{Error, ResultExt};
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]

View File

@@ -556,3 +556,51 @@ async fn js_action_test_deep_dir_escape() {
.unwrap()
.unwrap();
}
// Integration test: runs the `test-rsync` action from
// `test/js_action_execute/` inside the JS engine, exercising the `runRsync`
// effect (spawn, id, progress, wait) end-to-end against real volumes.
#[tokio::test]
async fn js_rsync() {
    // The action takes no variable arguments.
    let js_action = JsProcedure { args: vec![] };
    let path: PathBuf = "test/js_action_execute/"
        .parse::<PathBuf>()
        .unwrap()
        .canonicalize()
        .unwrap();
    let package_id = "test-package".parse().unwrap();
    let package_version: Version = "0.3.0.3".parse().unwrap();
    let name = ProcedureName::Action("test-rsync".parse().unwrap());
    // Volume layout the test package expects: a writable data volume, a
    // read-only assets volume, and a pointer into another package's volume.
    let volumes: Volumes = serde_json::from_value(serde_json::json!({
        "main": {
            "type": "data"
        },
        "compat": {
            "type": "assets"
        },
        "filebrowser" :{
            "package-id": "filebrowser",
            "path": "data",
            "readonly": true,
            "type": "pointer",
            "volume-id": "main",
        }
    }))
    .unwrap();
    let input: Option<serde_json::Value> = None;
    let timeout = Some(Duration::from_secs(10));
    js_action
        .execute::<serde_json::Value, serde_json::Value>(
            &path,
            &package_id,
            &package_version,
            name,
            &volumes,
            input,
            timeout,
            // Command execution is stubbed: this test only covers rsync, so
            // any attempt to run a container command fails loudly.
            Arc::new(|_, _, _, _| {
                Box::pin(async move { Err("Can't run commands in test".to_string()) })
            }),
            Arc::new(|_| Box::pin(async move { Err("Can't run commands in test".to_string()) })),
        )
        .await
        .unwrap()
        .unwrap();
}

View File

@@ -4,6 +4,7 @@ use std::sync::Arc;
use color_eyre::eyre::eyre;
use futures::future::BoxFuture;
use futures::{StreamExt, TryFutureExt};
use helpers::{Rsync, RsyncOptions};
use josekit::jwk::Jwk;
use openssl::x509::X509;
use patch_db::DbHandle;
@@ -33,7 +34,6 @@ use crate::init::init;
use crate::middleware::encrypt::EncryptedWire;
use crate::net::ssl::SslManager;
use crate::sound::BEETHOVEN;
use crate::util::rsync::{Rsync, RsyncOptions};
use crate::{Error, ErrorKind, ResultExt};
#[instrument(skip(secrets))]

View File

@@ -4,6 +4,7 @@ use std::sync::Arc;
use clap::ArgMatches;
use color_eyre::eyre::{eyre, Result};
use emver::Version;
use helpers::{Rsync, RsyncOptions};
use lazy_static::lazy_static;
use patch_db::{DbHandle, LockType, Revision};
use reqwest::Url;
@@ -25,7 +26,7 @@ use crate::sound::{
CIRCLE_OF_5THS_SHORT, UPDATE_FAILED_1, UPDATE_FAILED_2, UPDATE_FAILED_3, UPDATE_FAILED_4,
};
use crate::update::latest_information::LatestInformation;
use crate::util::rsync::{Rsync, RsyncOptions};
use crate::util::Invoke;
use crate::version::{Current, VersionT};
use crate::{Error, ErrorKind, ResultExt};

View File

@@ -295,31 +295,3 @@ impl AsyncRead for BufferedWriteReader {
}
}
}
// `AsyncRead` adapter that substitutes every occurrence of one byte value
// with another as data streams through from `inner`.
#[pin_project::pin_project]
pub struct ByteReplacementReader<R> {
    // Byte value to search for.
    pub replace: u8,
    // Byte value written in its place.
    pub with: u8,
    #[pin]
    pub inner: R,
}

impl<R: AsyncRead> AsyncRead for ByteReplacementReader<R> {
    fn poll_read(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> std::task::Poll<std::io::Result<()>> {
        let this = self.project();
        match this.inner.poll_read(cx, buf) {
            Poll::Ready(Ok(())) => {
                // NOTE(review): this rewrites the ENTIRE filled region of
                // `buf`, not just the bytes produced by this read — if the
                // caller hands in a partially-filled buffer, pre-existing
                // bytes are replaced too. Confirm that is intended.
                for idx in 0..buf.filled().len() {
                    if buf.filled()[idx] == *this.replace {
                        buf.filled_mut()[idx] = *this.with;
                    }
                }
                Poll::Ready(Ok(()))
            }
            a => a,
        }
    }
}

View File

@@ -27,7 +27,6 @@ pub mod config;
pub mod http_reader;
pub mod io;
pub mod logger;
pub mod rsync;
pub mod serde;
#[derive(Clone, Copy, Debug)]

View File

@@ -915,4 +915,63 @@ export const action = {
},
};
},
/**
* Want to test that rsync works
* @param {*} effects
* @param {*} _input
* @returns
*/
async "test-rsync"(effects, _input) {
try {
await effects
.removeDir({
volumeId: "main",
path: "test-rsync-out",
})
.catch(() => {});
const runningRsync = effects.runRsync({
srcVolume: "main",
srcPath: "testing-rsync",
dstVolume: "main",
dstPath: "test-rsync-out",
options: {
delete: true,
force: true,
ignoreExisting: false,
}
});
assert(await runningRsync.id() >= 1, "Expect that we have an id");
const progress = await runningRsync.progress()
assert(progress >= 0 && progress <= 1, `Expect progress to be 0 <= progress(${progress}) <= 1`);
await runningRsync.wait();
assert((await effects.readFile({
volumeId: "main",
path: "test-rsync-out/testing-rsync/someFile.txt",
})).length > 0, 'Asserting that we read in the file "test_rsync/test-package/0.3.0.3/embassy.js"');
return {
result: {
copyable: false,
message: "Done",
version: "0",
qr: false,
},
};
}
catch (e) {
throw e;
}
finally {
await effects
.removeDir({
volumeId: "main",
path: "test-rsync-out",
})
.catch(() => {});
}
},
};

View File

@@ -0,0 +1 @@
Here's something in this file for the rsync

1210
libs/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,9 @@
[workspace]
members = [
"snapshot-creator",
"snapshot_creator",
"models",
"js_engine",
"helpers",
"embassy-container-init",
"embassy_container_init",
]

View File

@@ -15,11 +15,11 @@ fi
echo "Building "
cd ..
docker run --rm $USE_TTY -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$(pwd)":/home/rust/src start9/rust-arm-cross:aarch64 sh -c "(cd libs/ && cargo build -p snapshot-creator --release )"
docker run --rm $USE_TTY -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$(pwd)":/home/rust/src start9/rust-arm-cross:aarch64 sh -c "(cd libs/ && cargo build -p snapshot_creator --release )"
cd -
echo "Creating Arm v8 Snapshot"
docker run $USE_TTY --platform linux/arm64/v8 --mount type=bind,src=$(pwd),dst=/mnt arm64v8/ubuntu:20.04 /bin/sh -c "cd /mnt && /mnt/target/aarch64-unknown-linux-gnu/release/snapshot-creator"
docker run $USE_TTY --platform linux/arm64/v8 --mount type=bind,src=$(pwd),dst=/mnt arm64v8/ubuntu:20.04 /bin/sh -c "cd /mnt && /mnt/target/aarch64-unknown-linux-gnu/release/snapshot_creator"
sudo chown -R $USER target
sudo chown -R $USER ~/.cargo
sudo chown $USER JS_SNAPSHOT.bin

View File

@@ -9,7 +9,7 @@ if [ "$0" != "./build-v8-snapshot.sh" ]; then
fi
echo "Creating v8 Snapshot"
cargo run -p snapshot-creator --release
cargo run -p snapshot_creator --release
sudo chown -R $USER target
sudo chown -R $USER ~/.cargo
sudo chown $USER JS_SNAPSHOT.bin

View File

@@ -7,11 +7,14 @@ pub type InputJsonRpc = JsonRpc<Input>;
pub type OutputJsonRpc = JsonRpc<Output>;
/// Based on the jsonrpc spec, but we are limiting the rpc to a subset
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Serialize, Copy, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[serde(untagged)]
pub enum RpcId {
UInt(u32),
}
/// Know what the process is called
#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProcessId(pub u32);
/// We use the JSON rpc as the format to share between the stdin and stdout for the executable.
/// Note: We are not allowing the id to not exist, used to ensure all pairs of messages are tracked
@@ -76,6 +79,9 @@ where
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
pub enum Output {
/// Will be called almost right away and only once per RpcId. Indicates what
/// was spawned in the container.
ProcessId(ProcessId),
/// This is the line buffered output of the command
Line(String),
/// This is some kind of error with the program

View File

@@ -3,7 +3,9 @@ use std::process::Stdio;
use std::sync::Arc;
use async_stream::stream;
use embassy_container_init::{Input, InputJsonRpc, JsonRpc, Output, OutputJsonRpc, RpcId};
use embassy_container_init::{
Input, InputJsonRpc, JsonRpc, Output, OutputJsonRpc, ProcessId, RpcId,
};
use futures::{pin_mut, Stream, StreamExt};
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::{Child, Command};
@@ -36,6 +38,9 @@ impl ChildAndRpc {
child: command.spawn()?,
})
}
fn id(&self) -> Option<ProcessId> {
self.child.id().map(ProcessId)
}
async fn wait(&mut self) -> DoneProgram {
let status = DoneProgramStatus::Wait(self.child.wait().await);
DoneProgram {
@@ -63,7 +68,7 @@ impl ChildAndRpc {
#[derive(Debug, Clone)]
struct Io {
commands: Arc<Mutex<BTreeMap<RpcId, oneshot::Sender<()>>>>,
ids: Arc<Mutex<BTreeMap<RpcId, u32>>>,
ids: Arc<Mutex<BTreeMap<RpcId, ProcessId>>>,
}
impl Io {
@@ -107,8 +112,9 @@ impl Io {
Ok(a) => a,
};
if let Some(child_id) = child_and_rpc.child.id() {
io.ids.lock().await.insert(id.clone(), child_id);
if let Some(child_id) = child_and_rpc.id() {
io.ids.lock().await.insert(id.clone(), child_id.clone());
yield JsonRpc::new(id.clone(), Output::ProcessId(child_id));
}
let stdout = child_and_rpc.child
@@ -218,7 +224,7 @@ impl Io {
async fn clean_id(
&self,
done_program: &DoneProgram,
) -> (Option<u32>, Option<oneshot::Sender<()>>) {
) -> (Option<ProcessId>, Option<oneshot::Sender<()>>) {
(
self.ids.lock().await.remove(&done_program.id),
self.commands.lock().await.remove(&done_program.id),
@@ -230,7 +236,7 @@ impl Io {
let output = match self.remove_cmd_id(rpc).await {
Some(id) => {
let mut cmd = tokio::process::Command::new("kill");
cmd.arg(format!("{id}"));
cmd.arg(format!("{}", id.0));
cmd.output().await
}
None => return,
@@ -252,7 +258,7 @@ impl Io {
}
}
async fn remove_cmd_id(&self, rpc: &RpcId) -> Option<u32> {
async fn remove_cmd_id(&self, rpc: &RpcId) -> Option<ProcessId> {
self.ids.lock().await.remove(rpc)
}
}

View File

@@ -8,6 +8,9 @@ edition = "2021"
[dependencies]
color-eyre = "0.6.2"
futures = "0.3.21"
pin-project = "1.0.11"
tokio = { version = "1.19.2", features = ["full"] }
models = { path = "../models" }
pin-project = "1.0.11"
serde = { version = "1.0", features = ["derive", "rc"] }
tokio = { version = "1.19.2", features = ["full"] }
tokio-stream = { version = "0.1.9", features = ["io-util", "sync"] }
tracing = "0.1.35"

View File

@@ -0,0 +1,31 @@
use std::task::Poll;
use tokio::io::{AsyncRead, ReadBuf};
/// `AsyncRead` adapter that substitutes every occurrence of one byte value
/// with another as data streams through from `inner` (e.g. mapping `\r`
/// progress updates to `\n` so they can be line-buffered).
#[pin_project::pin_project]
pub struct ByteReplacementReader<R> {
    /// Byte value to search for.
    pub replace: u8,
    /// Byte value written in its place.
    pub with: u8,
    #[pin]
    pub inner: R,
}

impl<R: AsyncRead> AsyncRead for ByteReplacementReader<R> {
    fn poll_read(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> std::task::Poll<std::io::Result<()>> {
        let this = self.project();
        // Record how much of `buf` was filled before this read so only the
        // bytes produced here are rewritten. The previous version scanned the
        // whole filled region, which would also mutate data placed in the
        // buffer by the caller or by other readers sharing it.
        let already_filled = buf.filled().len();
        match this.inner.poll_read(cx, buf) {
            Poll::Ready(Ok(())) => {
                for byte in &mut buf.filled_mut()[already_filled..] {
                    if *byte == *this.replace {
                        *byte = *this.with;
                    }
                }
                Poll::Ready(Ok(()))
            }
            other => other,
        }
    }
}

View File

@@ -9,7 +9,11 @@ use tokio::fs::File;
use tokio::sync::oneshot;
use tokio::task::{JoinError, JoinHandle, LocalSet};
mod byte_replacement_reader;
mod rsync;
mod script_dir;
pub use byte_replacement_reader::*;
pub use rsync::*;
pub use script_dir::*;
pub fn to_tmp_path(path: impl AsRef<Path>) -> Result<PathBuf, Error> {

View File

@@ -1,15 +1,15 @@
use color_eyre::eyre::eyre;
use std::path::Path;
use helpers::NonDetachingJoinHandle;
use crate::{ByteReplacementReader, NonDetachingJoinHandle};
use models::{Error, ErrorKind};
use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use tokio::process::{Child, Command};
use tokio::sync::watch;
use tokio_stream::wrappers::WatchStream;
use crate::util::io::ByteReplacementReader;
use crate::{Error, ErrorKind};
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RsyncOptions {
pub delete: bool,
pub force: bool,
@@ -56,8 +56,24 @@ impl Rsync {
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped())
.spawn()?;
let cmd_stdout = command.stdout.take().unwrap();
let mut cmd_stderr = command.stderr.take().unwrap();
let cmd_stdout = match command.stdout.take() {
None => {
return Err(Error::new(
eyre!("rsync command stdout is none"),
ErrorKind::Filesystem,
))
}
Some(a) => a,
};
let mut cmd_stderr = match command.stderr.take() {
None => {
return Err(Error::new(
eyre!("rsync command stderr is none"),
ErrorKind::Filesystem,
))
}
Some(a) => a,
};
let (send, recv) = watch::channel(0.0);
let stderr = tokio::spawn(async move {
let mut res = String::new();
@@ -77,7 +93,12 @@ impl Rsync {
.split_ascii_whitespace()
.find_map(|col| col.strip_suffix("%"))
{
send.send(percentage.parse::<f64>()? / 100.0).unwrap();
if let Err(err) = send.send(percentage.parse::<f64>()? / 100.0) {
return Err(Error::new(
eyre!("rsync progress send error: {}", err),
ErrorKind::Filesystem,
));
}
}
}
Ok(())
@@ -92,12 +113,24 @@ impl Rsync {
}
pub async fn wait(mut self) -> Result<(), Error> {
let status = self.command.wait().await?;
let stderr = self.stderr.await.unwrap()?;
let stderr = match self.stderr.await {
Err(err) => {
return Err(Error::new(
eyre!("rsync stderr error: {}", err),
ErrorKind::Filesystem,
))
}
Ok(a) => a?,
};
if status.success() {
tracing::info!("rsync: {}", stderr);
} else {
return Err(Error::new(
eyre!("rsync error: {}", stderr),
eyre!(
"rsync error: {} {} ",
status.code().map(|x| x.to_string()).unwrap_or_default(),
stderr
),
ErrorKind::Filesystem,
));
}

View File

@@ -11,7 +11,7 @@ dashmap = "5.3.4"
deno_core = "=0.136.0"
deno_ast = { version = "=0.15.0", features = ["transpiling"] }
dprint-swc-ext = "=0.1.1"
embassy_container_init = { path = "../embassy-container-init" }
embassy_container_init = { path = "../embassy_container_init" }
reqwest = { version = "0.11.11" }
swc_atoms = "=0.2.11"
swc_common = "=0.18.7"

View File

@@ -44,14 +44,18 @@ const runDaemon = (
{ command = requireParam("command"), args = [] } = requireParam("options"),
) => {
let id = Deno.core.opAsync("start_command", command, args);
let rpcId = id.then(x => x.rpcId)
let processId = id.then(x => x.processId)
let waitPromise = null;
return {
processId,
rpcId,
async wait() {
waitPromise = waitPromise || Deno.core.opAsync("wait_command", await id)
waitPromise = waitPromise || Deno.core.opAsync("wait_command", await rpcId)
return waitPromise
},
async term() {
return Deno.core.opAsync("term_command", await id)
return Deno.core.opAsync("term_command", await rpcId)
}
}
};
@@ -128,6 +132,31 @@ const fetch = async (url = requireParam ('url'), options = null) => {
};
};
/**
 * Starts an rsync copy between two volumes via the host `rsync` op and
 * returns a handle exposing `id()`, `wait()`, and `progress()` backed by the
 * matching `rsync_wait` / `rsync_progress` ops.
 * Missing required fields fail fast through `requireParam`.
 */
const runRsync = (
    {
        srcVolume = requireParam("srcVolume"),
        dstVolume = requireParam("dstVolume"),
        srcPath = requireParam("srcPath"),
        dstPath = requireParam("dstPath"),
        options = requireParam("options"),
    } = requireParam("options"),
) => {
    // Resolves to the handle index allocated host-side for this transfer.
    let id = Deno.core.opAsync("rsync", srcVolume, srcPath, dstVolume, dstPath, options);
    // Cached so repeated wait() calls share a single host-side wait.
    let waitPromise = null;
    return {
        async id() {
            return id
        },
        async wait() {
            waitPromise = waitPromise || Deno.core.opAsync("rsync_wait", await id)
            return waitPromise
        },
        async progress() {
            return Deno.core.opAsync("rsync_progress", await id)
        }
    }
};
const currentFunction = Deno.core.opSync("current_function");
const input = Deno.core.opSync("get_input");
const variable_args = Deno.core.opSync("get_variable_args");
@@ -151,7 +180,8 @@ const effects = {
rename,
runCommand,
sleep,
runDaemon
runDaemon,
runRsync
};
const runFunction = jsonPointerValue(mainModule, currentFunction);

View File

@@ -11,7 +11,8 @@ use deno_core::{
resolve_import, Extension, JsRuntime, ModuleLoader, ModuleSource, ModuleSourceFuture,
ModuleSpecifier, ModuleType, OpDecl, RuntimeOptions, Snapshot,
};
use helpers::{script_dir, spawn_local};
use embassy_container_init::RpcId;
use helpers::{script_dir, spawn_local, Rsync};
use models::{ExecCommand, PackageId, ProcedureName, TermCommand, Version, VolumeId};
use serde::{Deserialize, Serialize};
use serde_json::Value;
@@ -83,7 +84,7 @@ const SNAPSHOT_BYTES: &[u8] = include_bytes!("./artifacts/JS_SNAPSHOT.bin");
#[cfg(target_arch = "aarch64")]
const SNAPSHOT_BYTES: &[u8] = include_bytes!("./artifacts/ARM_JS_SNAPSHOT.bin");
type WaitFns = Arc<Mutex<BTreeMap<u32, Pin<Box<dyn Future<Output = ResultType>>>>>>;
type WaitFns = Arc<Mutex<BTreeMap<RpcId, Pin<Box<dyn Future<Output = ResultType>>>>>>;
#[derive(Clone)]
struct JsContext {
@@ -98,6 +99,7 @@ struct JsContext {
command_inserter: ExecCommand,
term_command: TermCommand,
wait_fns: WaitFns,
rsyncs: Arc<Mutex<(usize, BTreeMap<usize, Rsync>)>>,
}
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "kebab-case")]
@@ -262,13 +264,13 @@ impl JsExecutionEnvironment {
Err(err) => {
tracing::error!("{}", err);
tracing::debug!("{:?}", err);
return Err((
Err((
JsError::BoundryLayerSerDe,
format!(
"Couldn't convert output = {:#?} to the correct type",
serde_json::to_string_pretty(&output).unwrap_or_default()
),
));
))
}
}
}
@@ -296,6 +298,9 @@ impl JsExecutionEnvironment {
fns::wait_command::decl(),
fns::sleep::decl(),
fns::term_command::decl(),
fns::rsync::decl(),
fns::rsync_wait::decl(),
fns::rsync_progress::decl(),
]
}
@@ -328,6 +333,7 @@ impl JsExecutionEnvironment {
command_inserter: self.command_inserter.clone(),
term_command: self.term_command.clone(),
wait_fns: Default::default(),
rsyncs: Default::default(),
};
let ext = Extension::builder()
.ops(Self::declarations())
@@ -382,9 +388,10 @@ mod fns {
use deno_core::anyhow::{anyhow, bail};
use deno_core::error::AnyError;
use deno_core::*;
use embassy_container_init::RpcId;
use helpers::{to_tmp_path, AtomicFile};
use embassy_container_init::{ProcessId, RpcId};
use helpers::{to_tmp_path, AtomicFile, Rsync, RsyncOptions};
use models::{TermCommand, VolumeId};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tokio::io::AsyncWriteExt;
@@ -644,6 +651,106 @@ mod fns {
tokio::fs::rename(old_file, new_file).await?;
Ok(())
}
/// Deno op backing `effects.runRsync`: validates that source and destination
/// stay inside their declared volumes, spawns the rsync, registers the handle
/// in the shared map, and returns the id used by `rsync_wait` /
/// `rsync_progress`.
#[op]
async fn rsync(
    state: Rc<RefCell<OpState>>,
    src_volume: VolumeId,
    src_path: PathBuf,
    dst_volume: VolumeId,
    dst_path: PathBuf,
    options: RsyncOptions,
) -> Result<usize, AnyError> {
    // Extract everything needed from the op state up front so the borrow of
    // `state` ends before any `.await`.
    let (volumes, volume_path, volume_path_out, rsyncs) = {
        let state = state.borrow();
        let ctx: &JsContext = state.borrow();
        let volume_path = ctx
            .volumes
            .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &src_volume)
            .ok_or_else(|| anyhow!("There is no {} in volumes", src_volume))?;
        let volume_path_out = ctx
            .volumes
            .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &dst_volume)
            .ok_or_else(|| anyhow!("There is no {} in volumes", dst_volume))?;
        (
            ctx.volumes.clone(),
            volume_path,
            volume_path_out,
            ctx.rsyncs.clone(),
        )
    };
    if volumes.readonly(&dst_volume) {
        bail!("Volume {} is readonly", dst_volume);
    }
    let src = volume_path.join(src_path);
    // With the volume check: reject paths that escape the source volume.
    if !is_subset(&volume_path, &src).await? {
        bail!(
            "Path '{}' has broken away from parent '{}'",
            src.to_string_lossy(),
            volume_path.to_string_lossy(),
        );
    }
    // `.is_err()` replaces the original `if let Err(_) = …`
    // (clippy::redundant_pattern_matching); behavior is identical.
    if tokio::fs::metadata(&src).await.is_err() {
        bail!("Source at {} does not exists", src.to_string_lossy());
    }
    let dst = volume_path_out.join(dst_path);
    // With the volume check: reject paths that escape the destination volume.
    if !is_subset(&volume_path_out, &dst).await? {
        bail!(
            "Path '{}' has broken away from parent '{}'",
            dst.to_string_lossy(),
            volume_path_out.to_string_lossy(),
        );
    }
    let running_rsync =
        Rsync::new(src, dst, options).map_err(|e| anyhow::anyhow!("{:?}", e.source))?;
    // Allocate the next handle id and store the running transfer under it.
    let insert_id = {
        let mut rsyncs = rsyncs.lock().await;
        let next = rsyncs.0 + 1;
        rsyncs.0 = next;
        rsyncs.1.insert(next, running_rsync);
        next
    };
    Ok(insert_id)
}
/// Deno op: removes the running rsync identified by `id` from the shared map
/// and waits for it to finish, surfacing any rsync failure as an op error.
#[op]
async fn rsync_wait(state: Rc<RefCell<OpState>>, id: usize) -> Result<(), AnyError> {
    let rsyncs = {
        let state = state.borrow();
        let ctx: &JsContext = state.borrow();
        ctx.rsyncs.clone()
    };
    // Ownership of the handle transfers out of the map here, so waiting twice
    // on the same id fails with "Couldn't find rsync".
    let running_rsync = match rsyncs.lock().await.1.remove(&id) {
        Some(a) => a,
        None => bail!("Couldn't find rsync at id {id}"),
    };
    running_rsync
        .wait()
        .await
        .map_err(|x| anyhow::anyhow!("{}", x.source))?;
    Ok(())
}
/// Deno op: reports the latest progress value of the running rsync `id`
/// (0.0–1.0; defaults to 0.0 if the progress stream yields nothing).
#[op]
async fn rsync_progress(state: Rc<RefCell<OpState>>, id: usize) -> Result<f64, AnyError> {
    use futures::StreamExt;
    let rsyncs = {
        let state = state.borrow();
        let ctx: &JsContext = state.borrow();
        ctx.rsyncs.clone()
    };
    // The handle is temporarily removed so its progress stream can be polled
    // mutably, then reinserted under the same id.
    // NOTE(review): between the remove and the insert, a concurrent
    // `rsync_wait` for the same id will not find the handle — confirm callers
    // never race here.
    let mut running_rsync = match rsyncs.lock().await.1.remove(&id) {
        Some(a) => a,
        None => bail!("Couldn't find rsync at id {id}"),
    };
    let progress = running_rsync.progress.next().await.unwrap_or_default();
    rsyncs.lock().await.1.insert(id, running_rsync);
    Ok(progress)
}
#[op]
async fn remove_file(
state: Rc<RefCell<OpState>>,
@@ -835,13 +942,20 @@ mod fns {
Ok(())
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StartCommand {
rpc_id: RpcId,
process_id: ProcessId,
}
#[op]
async fn start_command(
state: Rc<RefCell<OpState>>,
command: String,
args: Vec<String>,
timeout: Option<u64>,
) -> Result<u32, AnyError> {
) -> Result<StartCommand, AnyError> {
use embassy_container_init::Output;
let (command_inserter, wait_fns) = {
let state = state.borrow();
@@ -850,7 +964,7 @@ mod fns {
};
let (sender, mut receiver) = tokio::sync::mpsc::unbounded_channel::<Output>();
let id = match command_inserter(
let rpc_id = match command_inserter(
command,
args.into_iter().collect(),
sender,
@@ -859,15 +973,28 @@ mod fns {
.await
{
Err(err) => bail!(err),
Ok(RpcId::UInt(a)) => a,
Ok(rpc_id) => rpc_id,
};
let (process_id_send, process_id_recv) = tokio::sync::oneshot::channel::<ProcessId>();
let wait = async move {
let mut answer = String::new();
let mut command_error = String::new();
let mut status: Option<i32> = None;
let mut process_id_send = Some(process_id_send);
while let Some(output) = receiver.recv().await {
match output {
Output::ProcessId(process_id) => {
if let Some(process_id_send) = process_id_send.take() {
if let Err(err) = process_id_send.send(process_id) {
tracing::error!(
"Could not get a process id {process_id:?} sent for {rpc_id:?}"
);
tracing::debug!("{err:?}");
}
}
}
Output::Line(value) => {
answer.push_str(&value);
answer.push('\n');
@@ -892,12 +1019,13 @@ mod fns {
ResultType::Result(serde_json::Value::String(answer))
};
wait_fns.lock().await.insert(id, Box::pin(wait));
Ok(id)
wait_fns.lock().await.insert(rpc_id, Box::pin(wait));
let process_id = process_id_recv.await?;
Ok(StartCommand { rpc_id, process_id })
}
#[op]
async fn wait_command(state: Rc<RefCell<OpState>>, id: u32) -> Result<ResultType, AnyError> {
async fn wait_command(state: Rc<RefCell<OpState>>, id: RpcId) -> Result<ResultType, AnyError> {
let wait_fns = {
let state = state.borrow();
let ctx = state.borrow::<JsContext>();
@@ -906,7 +1034,7 @@ mod fns {
let found_future = match wait_fns.lock().await.remove(&id) {
Some(a) => a,
None => bail!("No future for id {id}, could have been removed already"),
None => bail!("No future for id {id:?}, could have been removed already"),
};
Ok(found_future.await)

View File

@@ -6,14 +6,29 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
embassy_container_init = { path = "../embassy-container-init" }
bollard = "0.13.0"
color-eyre = "0.6.1"
ed25519-dalek = { version = "1.0.1", features = ["serde"] }
mbrman = "0.5.0"
embassy_container_init = { path = "../embassy_container_init" }
emver = { version = "0.1", git = "https://github.com/Start9Labs/emver-rs.git", features = [
"serde",
] }
openssl = { version = "0.10.41", features = ["vendored"] }
patch-db = { version = "*", path = "../../patch-db/patch-db", features = [
"trace",
] }
serde = { version = "1.0", features = ["derive", "rc"] }
rand = "0.8"
tokio = { version = "1", features = ["full"] }
rpc-toolkit = "0.2.1"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0.82"
sqlx = { version = "0.6.0", features = [
"chrono",
"offline",
"runtime-tokio-rustls",
"postgres",
] }
thiserror = "1.0"
tokio = { version = "1", features = ["full"] }
torut = "0.2.1"
tracing = "0.1.35"

388
libs/models/src/errors.rs Normal file
View File

@@ -0,0 +1,388 @@
use std::fmt::Display;
use crate::InvalidId;
use color_eyre::eyre::eyre;
use patch_db::Revision;
use rpc_toolkit::{hyper::http::uri::InvalidUri, yajrc::RpcError};
/// Error categories exposed over the RPC API.
///
/// Each variant's discriminant is the stable numeric JSON-RPC error code
/// (see `From<Error> for RpcError`), so existing values must never be
/// renumbered. Retired variants are commented out rather than removed to
/// keep their codes reserved (e.g. 16, formerly `Nginx`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ErrorKind {
    Unknown = 1,
    Filesystem = 2,
    Docker = 3,
    ConfigSpecViolation = 4,
    ConfigRulesViolation = 5,
    NotFound = 6,
    IncorrectPassword = 7,
    VersionIncompatible = 8,
    Network = 9,
    Registry = 10,
    Serialization = 11,
    Deserialization = 12,
    Utf8 = 13,
    ParseVersion = 14,
    IncorrectDisk = 15,
    // Nginx = 16,
    Dependency = 17,
    ParseS9pk = 18,
    ParseUrl = 19,
    DiskNotAvailable = 20,
    BlockDevice = 21,
    InvalidOnionAddress = 22,
    Pack = 23,
    ValidateS9pk = 24,
    DiskCorrupted = 25, // Remove
    Tor = 26,
    ConfigGen = 27,
    ParseNumber = 28,
    Database = 29,
    InvalidPackageId = 30,
    InvalidSignature = 31,
    Backup = 32,
    Restore = 33,
    Authorization = 34,
    AutoConfigure = 35,
    Action = 36,
    RateLimited = 37,
    InvalidRequest = 38,
    MigrationFailed = 39,
    Uninitialized = 40,
    ParseNetAddress = 41,
    ParseSshKey = 42,
    SoundError = 43,
    ParseTimestamp = 44,
    ParseSysInfo = 45,
    Wifi = 46,
    Journald = 47,
    DiskManagement = 48,
    OpenSsl = 49,
    PasswordHashGeneration = 50,
    DiagnosticMode = 51,
    ParseDbField = 52,
    Duplicate = 53,
    MultipleErrors = 54,
    Incoherent = 55,
    InvalidBackupTargetId = 56,
    ProductKeyMismatch = 57,
    LanPortConflict = 58,
    Javascript = 59,
    Pem = 60,
    TLSInit = 61,
    HttpRange = 62,
    ContentLength = 63,
    BytesError = 64,
    InvalidIP = 65,
    JoinError = 66,
    AsciiError = 67,
    NoHost = 68,
    SignError = 69,
}
impl ErrorKind {
    /// Static human-readable label for this category; used as the JSON-RPC
    /// error `message` and as the prefix of `Error`'s `Display` output.
    pub fn as_str(&self) -> &'static str {
        use ErrorKind::*;
        match self {
            Unknown => "Unknown Error",
            Filesystem => "Filesystem I/O Error",
            Docker => "Docker Error",
            ConfigSpecViolation => "Config Spec Violation",
            ConfigRulesViolation => "Config Rules Violation",
            NotFound => "Not Found",
            IncorrectPassword => "Incorrect Password",
            VersionIncompatible => "Version Incompatible",
            Network => "Network Error",
            Registry => "Registry Error",
            Serialization => "Serialization Error",
            Deserialization => "Deserialization Error",
            Utf8 => "UTF-8 Parse Error",
            ParseVersion => "Version Parsing Error",
            IncorrectDisk => "Incorrect Disk",
            // Nginx => "Nginx Error",
            Dependency => "Dependency Error",
            ParseS9pk => "S9PK Parsing Error",
            ParseUrl => "URL Parsing Error",
            DiskNotAvailable => "Disk Not Available",
            BlockDevice => "Block Device Error",
            InvalidOnionAddress => "Invalid Onion Address",
            Pack => "Pack Error",
            ValidateS9pk => "S9PK Validation Error",
            DiskCorrupted => "Disk Corrupted", // Remove
            Tor => "Tor Daemon Error",
            ConfigGen => "Config Generation Error",
            ParseNumber => "Number Parsing Error",
            Database => "Database Error",
            InvalidPackageId => "Invalid Package ID",
            InvalidSignature => "Invalid Signature",
            Backup => "Backup Error",
            Restore => "Restore Error",
            Authorization => "Unauthorized",
            AutoConfigure => "Auto-Configure Error",
            Action => "Action Failed",
            RateLimited => "Rate Limited",
            InvalidRequest => "Invalid Request",
            MigrationFailed => "Migration Failed",
            Uninitialized => "Uninitialized",
            ParseNetAddress => "Net Address Parsing Error",
            ParseSshKey => "SSH Key Parsing Error",
            SoundError => "Sound Interface Error",
            ParseTimestamp => "Timestamp Parsing Error",
            ParseSysInfo => "System Info Parsing Error",
            Wifi => "WiFi Internal Error",
            Journald => "Journald Error",
            DiskManagement => "Disk Management Error",
            OpenSsl => "OpenSSL Internal Error",
            PasswordHashGeneration => "Password Hash Generation Error",
            DiagnosticMode => "Embassy is in Diagnostic Mode",
            ParseDbField => "Database Field Parse Error",
            Duplicate => "Duplication Error",
            MultipleErrors => "Multiple Errors",
            Incoherent => "Incoherent",
            InvalidBackupTargetId => "Invalid Backup Target ID",
            ProductKeyMismatch => "Incompatible Product Keys",
            LanPortConflict => "Incompatible LAN Port Configuration",
            Javascript => "Javascript Engine Error",
            Pem => "PEM Encoding Error",
            TLSInit => "TLS Backend Initialize Error",
            HttpRange => "No Support for Web Server HTTP Ranges",
            ContentLength => "Request has no content length header",
            BytesError => "Could not get the bytes for this request",
            InvalidIP => "Could not parse this IP address",
            JoinError => "Join Handle Error",
            AsciiError => "Could not parse ascii text",
            // fix: dropped the accidental trailing space from the label
            NoHost => "No Host header",
            SignError => "Signing error",
        }
    }
}
impl Display for ErrorKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
/// The backend's unified error type: an underlying `eyre` report tagged with
/// an `ErrorKind` category plus an optional patch-db `Revision`.
#[derive(Debug)]
pub struct Error {
    // Underlying cause; rendered into the `details`/`debug` fields of the
    // RpcError data payload.
    pub source: color_eyre::eyre::Error,
    // Category; supplies the RPC error code and message.
    pub kind: ErrorKind,
    // Serialized into the RpcError data payload under "revision" — presumably
    // so clients can resync their patch-db state; TODO confirm.
    pub revision: Option<Revision>,
}
impl Display for Error {
    /// Renders as `<kind label>: <source>`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.kind.as_str())?;
        f.write_str(": ")?;
        write!(f, "{}", self.source)
    }
}
impl Error {
    /// Builds an `Error` of category `kind` from anything convertible into an
    /// `eyre` report; the revision starts out unset.
    pub fn new<E: Into<color_eyre::eyre::Error>>(source: E, kind: ErrorKind) -> Self {
        Self {
            kind,
            source: source.into(),
            revision: None,
        }
    }
}
// Blanket conversions from common error types into `Error`, each picking the
// matching `ErrorKind` so callers can use `?` directly.

// invalid package/volume/etc. id -> InvalidPackageId
impl From<InvalidId> for Error {
    fn from(err: InvalidId) -> Self {
        Error::new(err, ErrorKind::InvalidPackageId)
    }
}
// I/O failures -> Filesystem
impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Error::new(e, ErrorKind::Filesystem)
    }
}
// both UTF-8 decode failures map to the same Utf8 kind
impl From<std::str::Utf8Error> for Error {
    fn from(e: std::str::Utf8Error) -> Self {
        Error::new(e, ErrorKind::Utf8)
    }
}
impl From<std::string::FromUtf8Error> for Error {
    fn from(e: std::string::FromUtf8Error) -> Self {
        Error::new(e, ErrorKind::Utf8)
    }
}
// emver version-string parse failure -> ParseVersion
impl From<emver::ParseError> for Error {
    fn from(e: emver::ParseError) -> Self {
        Error::new(e, ErrorKind::ParseVersion)
    }
}
// URL parse failure -> ParseUrl
impl From<rpc_toolkit::url::ParseError> for Error {
    fn from(e: rpc_toolkit::url::ParseError) -> Self {
        Error::new(e, ErrorKind::ParseUrl)
    }
}
// numeric parse failures (int and float) -> ParseNumber
impl From<std::num::ParseIntError> for Error {
    fn from(e: std::num::ParseIntError) -> Self {
        Error::new(e, ErrorKind::ParseNumber)
    }
}
impl From<std::num::ParseFloatError> for Error {
    fn from(e: std::num::ParseFloatError) -> Self {
        Error::new(e, ErrorKind::ParseNumber)
    }
}
// both patch-db and sqlx failures map to the Database kind
impl From<patch_db::Error> for Error {
    fn from(e: patch_db::Error) -> Self {
        Error::new(e, ErrorKind::Database)
    }
}
impl From<sqlx::Error> for Error {
    fn from(e: sqlx::Error) -> Self {
        Error::new(e, ErrorKind::Database)
    }
}
// ed25519 verification failure -> InvalidSignature
impl From<ed25519_dalek::SignatureError> for Error {
    fn from(e: ed25519_dalek::SignatureError) -> Self {
        Error::new(e, ErrorKind::InvalidSignature)
    }
}
// Docker daemon client failure -> Docker
impl From<bollard::errors::Error> for Error {
    fn from(e: bollard::errors::Error) -> Self {
        Error::new(e, ErrorKind::Docker)
    }
}
// Tor control connection failure; formatted via Debug — presumably because
// ConnError doesn't convert into an eyre report directly; TODO confirm
impl From<torut::control::ConnError> for Error {
    fn from(e: torut::control::ConnError) -> Self {
        Error::new(eyre!("{:?}", e), ErrorKind::Tor)
    }
}
// socket-address parse failure -> ParseNetAddress
impl From<std::net::AddrParseError> for Error {
    fn from(e: std::net::AddrParseError) -> Self {
        Error::new(e, ErrorKind::ParseNetAddress)
    }
}
// OpenSSL error stack, flattened to its Display text -> OpenSsl
impl From<openssl::error::ErrorStack> for Error {
    fn from(e: openssl::error::ErrorStack) -> Self {
        Error::new(eyre!("{}", e), ErrorKind::OpenSsl)
    }
}
// MBR partition-table manipulation failure -> DiskManagement
impl From<mbrman::Error> for Error {
    fn from(e: mbrman::Error) -> Self {
        Error::new(e, ErrorKind::DiskManagement)
    }
}
// HTTP URI parse failure, flattened to its Display text -> ParseUrl
impl From<InvalidUri> for Error {
    fn from(e: InvalidUri) -> Self {
        Error::new(eyre!("{}", e), ErrorKind::ParseUrl)
    }
}
/// Converts the internal `Error` into a JSON-RPC error: the kind supplies
/// `code` and `message`, and the source plus revision are attached as a
/// structured `data` object. The `details`/`debug`/`revision` keys are part
/// of the wire format consumed by clients.
impl From<Error> for RpcError {
    fn from(e: Error) -> Self {
        let mut data_object = serde_json::Map::with_capacity(3);
        // `details` carries the user-facing Display text; `debug` the full
        // Debug rendering of the eyre report.
        data_object.insert("details".to_owned(), format!("{}", e.source).into());
        data_object.insert("debug".to_owned(), format!("{:?}", e.source).into());
        data_object.insert(
            "revision".to_owned(),
            // Serialization failure here is non-fatal: log it and send null.
            match serde_json::to_value(&e.revision) {
                Ok(a) => a,
                Err(e) => {
                    tracing::warn!("Error serializing revision for Error object: {}", e);
                    serde_json::Value::Null
                }
            },
        );
        RpcError {
            // the enum discriminant doubles as the JSON-RPC error code
            code: e.kind as i32,
            message: e.kind.as_str().into(),
            data: Some(data_object.into()),
        }
    }
}
/// Accumulator for multiple `Error`s, letting best-effort operations continue
/// past individual failures and report them all at the end.
#[derive(Debug, Default)]
pub struct ErrorCollection(Vec<Error>);
impl ErrorCollection {
pub fn new() -> Self {
Self::default()
}
pub fn handle<T, E: Into<Error>>(&mut self, result: Result<T, E>) -> Option<T> {
match result {
Ok(a) => Some(a),
Err(e) => {
self.0.push(e.into());
None
}
}
}
pub fn into_result(self) -> Result<(), Error> {
if self.0.is_empty() {
Ok(())
} else {
Err(Error::new(eyre!("{}", self), ErrorKind::MultipleErrors))
}
}
}
// Convenience conversion delegating to `into_result`.
impl From<ErrorCollection> for Result<(), Error> {
    fn from(e: ErrorCollection) -> Self {
        e.into_result()
    }
}
/// Feeds a stream of results through `handle`, recording every `Err` and
/// discarding the `Ok` values.
impl<T, E: Into<Error>> Extend<Result<T, E>> for ErrorCollection {
    fn extend<I: IntoIterator<Item = Result<T, E>>>(&mut self, iter: I) {
        iter.into_iter().for_each(|result| {
            self.handle(result);
        });
    }
}
impl std::fmt::Display for ErrorCollection {
    /// Joins the recorded errors with `"; "` (empty output when none).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut separator = "";
        for error in &self.0 {
            write!(f, "{}{}", separator, error)?;
            separator = "; ";
        }
        Ok(())
    }
}
/// Extension methods for tagging arbitrary `Result` errors with an
/// `ErrorKind` when converting them into `Error`.
pub trait ResultExt<T, E>
where
    Self: Sized,
{
    /// Tags the error with `kind`, leaving the source untouched.
    fn with_kind(self, kind: ErrorKind) -> Result<T, Error>;
    /// Derives both a kind and an extra context message from the error value
    /// itself; the context is wrapped around the source report.
    fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display + Send + Sync + 'static>(
        self,
        f: F,
    ) -> Result<T, Error>;
}
// Blanket implementation for any `Result` whose error converts into an eyre
// report.
impl<T, E> ResultExt<T, E> for Result<T, E>
where
    color_eyre::eyre::Error: From<E>,
{
    fn with_kind(self, kind: ErrorKind) -> Result<T, Error> {
        self.map_err(|e| Error {
            source: e.into(),
            kind,
            revision: None,
        })
    }
    fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display + Send + Sync + 'static>(
        self,
        f: F,
    ) -> Result<T, Error> {
        self.map_err(|e| {
            // `f` sees the error by reference before it is consumed below.
            let (kind, ctx) = f(&e);
            let source = color_eyre::eyre::Error::from(e);
            // NOTE(review): the wrap message embeds the source's Display text,
            // so the rendered report repeats the source once in the context
            // line and again as the cause — confirm this duplication is
            // intentional.
            let ctx = format!("{}: {}", ctx, source);
            let source = source.wrap_err(ctx);
            Error {
                kind,
                source,
                revision: None,
            }
        })
    }
}
/// Early-returns from the enclosing function with an `Error` of kind `$c`
/// (message built from the format args, eyre-style) when condition `$x` is
/// false. Requires `Error` to be in scope and the enclosing function to
/// return a compatible `Result`.
#[macro_export]
macro_rules! ensure_code {
    ($x:expr, $c:expr, $fmt:expr $(, $arg:expr)*) => {
        if !($x) {
            return Err(Error::new(color_eyre::eyre::eyre!($fmt, $($arg, )*), $c));
        }
    };
}

View File

@@ -1,4 +1,5 @@
mod action_id;
mod errors;
mod health_check_id;
mod id;
mod id_unchecked;
@@ -11,6 +12,7 @@ mod version;
mod volume_id;
pub use action_id::*;
pub use errors::*;
pub use health_check_id::*;
pub use id::*;
pub use id_unchecked::*;

View File

@@ -1,5 +1,5 @@
[package]
name = "snapshot-creator"
name = "snapshot_creator"
version = "0.1.0"
edition = "2021"