Merge branch 'next/minor' of github.com:Start9Labs/start-os into next/major

This commit is contained in:
Aiden McClelland
2023-11-13 14:59:16 -07:00
1115 changed files with 6871 additions and 1851 deletions

10
libs/.gitignore vendored
View File

@@ -1,10 +0,0 @@
/target
**/*.rs.bk
.DS_Store
.vscode
secrets.db
*.s9pk
*.sqlite3
.env
.editorconfig
proptest-regressions/*

View File

@@ -1,9 +0,0 @@
[workspace]
members = [
"snapshot_creator",
"models",
"js_engine",
"helpers",
"embassy_container_init",
]

View File

@@ -1,374 +0,0 @@
export namespace ExpectedExports {
  /** Set configuration is called after we have modified and saved the configuration in the StartOS ui. Use this to make a file for the docker to read from for configuration. */
  export type setConfig = (
    effects: Effects,
    input: Config
  ) => Promise<ResultType<SetResult>>;
  /** Get configuration returns a shape that describes the format that the StartOS ui will generate, and later send to the set config */
  export type getConfig = (effects: Effects) => Promise<ResultType<ConfigRes>>;
  /** These are how we make sure that our dependency configurations are valid and, if not, how to fix them. */
  export type dependencies = Dependencies;
  /** Properties are used to get values from the docker, like a username + password, what ports we are hosting from */
  export type properties = (
    effects: Effects
  ) => Promise<ResultType<Properties>>;
  /** Health checks, keyed by health check id; each is invoked with the current time in ms. */
  export type health = {
    /** Should be the health check id */
    [id: string]: (
      effects: Effects,
      dateMs: number
    ) => Promise<ResultType<null | void>>;
  };
  /** Migration hook; returns whether the service is still configured after migrating from `version`. */
  export type migration = (
    effects: Effects,
    version: string
  ) => Promise<ResultType<MigrationRes>>;
}
/** Used to reach out from the pure js runtime */
export type Effects = {
/** Usable when not sandboxed */
writeFile(input: {
path: string;
volumeId: string;
toWrite: string;
}): Promise<void>;
readFile(input: { volumeId: string; path: string }): Promise<string>;
metadata(input: { volumeId: string; path: string }): Promise<Metadata>;
/** Create a directory. Usable when not sandboxed */
createDir(input: { volumeId: string; path: string }): Promise<string>;
/** Remove a directory. Usable when not sandboxed */
removeDir(input: { volumeId: string; path: string }): Promise<string>;
removeFile(input: { volumeId: string; path: string }): Promise<void>;
/** Write a json file into an object. Usable when not sandboxed */
writeJsonFile(input: {
volumeId: string;
path: string;
toWrite: object;
}): Promise<void>;
/** Read a json file into an object */
readJsonFile(input: { volumeId: string; path: string }): Promise<object>;
/** Log at the trace level */
trace(whatToPrint: string): void;
/** Log at the warn level */
warn(whatToPrint: string): void;
/** Log at the error level */
error(whatToPrint: string): void;
/** Log at the debug level */
debug(whatToPrint: string): void;
/** Log at the info level */
info(whatToPrint: string): void;
/** Sandbox mode lets us read but not write */
is_sandboxed(): boolean;
};
export type Metadata = {
fileType: string;
isDir: boolean;
isFile: boolean;
isSymlink: boolean;
len: number;
modified?: Date;
accessed?: Date;
created?: Date;
readonly: boolean;
uid: number;
gid: number;
mode: number;
};
export type MigrationRes = {
configured: boolean;
};
export type ActionResult = {
version: "0";
message: string;
value?: string;
copyable: boolean;
qr: boolean;
};
export type ConfigRes = {
/** This should be the previous config, that way during set config we start with the previous */
config?: Config;
/** Shape that is describing the form in the ui */
spec: ConfigSpec;
};
export type Config = {
[propertyName: string]: any;
};
export type ConfigSpec = {
/** Given a config value, define what it should render with the following spec */
[configValue: string]: ValueSpecAny;
};
export type WithDefault<T, Default> = T & {
default?: Default;
};
/** Adds the human-facing metadata (name, optional description and warning) the UI renders next to a value spec. */
export type WithDescription<T> = T & {
  // Primitive `string`, not the `String` wrapper object type: the wrapper is
  // almost never intended and is not assignable where `string` is expected.
  description?: string;
  name: string;
  warning?: string;
};
export type ListSpec<T> = {
spec: T;
range: string;
};
export type Tag<T extends string, V> = V & {
type: T;
};
export type Subtype<T extends string, V> = V & {
subtype: T;
};
export type Target<T extends string, V> = V & {
target: T;
};
export type UniqueBy =
| {
any: UniqueBy[];
}
| string
| null;
export type WithNullable<T> = T & {
nullable: boolean;
};
/** Default for a string value: either a literal, or a recipe for random generation. */
export type DefaultString =
  // Primitive `string` rather than the `String` wrapper object type.
  | string
  | {
      /** The chars available for the random generation */
      charset?: string;
      /** Length that we generate to */
      len: number;
    };
export type ValueSpecString = (
| {}
| {
pattern: string;
"pattern-description": string;
}
) & {
copyable?: boolean;
masked?: boolean;
placeholder?: string;
};
export type ValueSpecNumber = {
/** Something like [3,6] or [0, *) */
range?: string;
integral?: boolean;
/** Used a description of the units */
units?: string;
placeholder?: number;
};
export type ValueSpecBoolean = {};
export type ValueSpecAny =
| Tag<"boolean", WithDescription<WithDefault<ValueSpecBoolean, boolean>>>
| Tag<
"string",
WithDescription<WithDefault<WithNullable<ValueSpecString>, DefaultString>>
>
| Tag<
"number",
WithDescription<WithDefault<WithNullable<ValueSpecNumber>, number>>
>
| Tag<
"enum",
WithDescription<
WithDefault<
{
values: string[];
"value-names": {
[key: string]: string;
};
},
string
>
>
>
| Tag<"list", ValueSpecList>
| Tag<"object", WithDescription<WithDefault<ValueSpecObject, Config>>>
| Tag<"union", WithDescription<WithDefault<ValueSpecUnion, string>>>
| Tag<
"pointer",
WithDescription<
| Subtype<
"package",
| Target<
"tor-key",
{
"package-id": string;
interface: string;
}
>
| Target<
"tor-address",
{
"package-id": string;
interface: string;
}
>
| Target<
"lan-address",
{
"package-id": string;
interface: string;
}
>
| Target<
"config",
{
"package-id": string;
selector: string;
multi: boolean;
}
>
>
| Subtype<"system", {}>
>
>;
export type ValueSpecUnion = {
/** What tag for the specification, for tag unions */
tag: {
id: string;
name: string;
description?: string;
"variant-names": {
[key: string]: string;
};
};
/** The possible enum values */
variants: {
[key: string]: ConfigSpec;
};
"display-as"?: string;
"unique-by"?: UniqueBy;
};
export type ValueSpecObject = {
spec: ConfigSpec;
"display-as"?: string;
"unique-by"?: UniqueBy;
};
export type ValueSpecList =
| Subtype<
"boolean",
WithDescription<WithDefault<ListSpec<ValueSpecBoolean>, boolean[]>>
>
| Subtype<
"string",
WithDescription<WithDefault<ListSpec<ValueSpecString>, string[]>>
>
| Subtype<
"number",
WithDescription<WithDefault<ListSpec<ValueSpecNumber>, number[]>>
>
| Subtype<
"enum",
WithDescription<WithDefault<ListSpec<ValueSpecEnum>, string[]>>
>
| Subtype<
"object",
WithDescription<WithDefault<ListSpec<ValueSpecObject>, {}[]>>
>
| Subtype<
"union",
WithDescription<WithDefault<ListSpec<ValueSpecUnion>, string[]>>
>;
export type ValueSpecEnum = {
values: string[];
"value-names": { [key: string]: string };
};
export type SetResult = {
/** These are the unix process signals */
signal:
| "SIGTERM"
| "SIGHUP"
| "SIGINT"
| "SIGQUIT"
| "SIGILL"
| "SIGTRAP"
| "SIGABRT"
| "SIGBUS"
| "SIGFPE"
| "SIGKILL"
| "SIGUSR1"
| "SIGSEGV"
| "SIGUSR2"
| "SIGPIPE"
| "SIGALRM"
| "SIGSTKFLT"
| "SIGCHLD"
| "SIGCONT"
| "SIGSTOP"
| "SIGTSTP"
| "SIGTTIN"
| "SIGTTOU"
| "SIGURG"
| "SIGXCPU"
| "SIGXFSZ"
| "SIGVTALRM"
| "SIGPROF"
| "SIGWINCH"
| "SIGIO"
| "SIGPWR"
| "SIGSYS"
| "SIGEMT"
| "SIGINFO";
"depends-on": {
[packageId: string]: string[];
};
};
/** Error shape returned by package scripts: a plain message, or a `[code, message]` tuple. */
export type KnownError =
  // Primitive `string` rather than the `String` wrapper object type.
  | { error: string }
  | {
      "error-code": [number, string] | readonly [number, string];
    };
export type ResultType<T> = KnownError | { result: T };
export type PackagePropertiesV2 = {
[name: string]: PackagePropertyObject | PackagePropertyString;
};
export type PackagePropertyString = {
  type: "string";
  description?: string;
  value: string;
  /** Lets the UI render a copy-to-clipboard button for this value */
  copyable?: boolean;
  /** Lets the UI render a QR code for this value */
  qr?: boolean;
  /** Hide the value behind a mask unless the user toggles it visible */
  masked?: boolean;
};
export type PackagePropertyObject = {
value: PackagePropertiesV2;
type: "object";
description: string;
};
export type Properties = {
version: 2;
data: PackagePropertiesV2;
};
export type Dependencies = {
/** Id is the id of the package, should be the same as the manifest */
[id: string]: {
/** Checks are called to make sure that our dependency is in the correct shape. If a known error is returned we know that the dependency needs modification */
check(effects: Effects, input: Config): Promise<ResultType<void | null>>;
/** This is called after we know that the dependency package needs a new configuration, this would be a transform for defaults */
autoConfigure(effects: Effects, input: Config): Promise<ResultType<Config>>;
};
};

View File

@@ -1,31 +0,0 @@
#!/bin/bash
# We need to create a snapshot for the deno runtime. It wants to pull 3 files from
# build, and during the creation it gets embedded, but for some reason during the
# actual runtime it is looking for them. So this builds in an aarch64 cross
# container and then runs the creator in an arm64 container to produce the
# snapshot needed for the arm target.
set -e
shopt -s expand_aliases

# All paths below are relative to backend/workspace, so refuse to run elsewhere.
if [ "$0" != "./build-arm-v8-snapshot.sh" ]; then
    >&2 echo "Must be run from backend/workspace directory"
    exit 1
fi

# Only request a TTY from docker when we actually have one (CI does not).
USE_TTY=
if tty -s; then
    USE_TTY="-it"
fi

alias 'rust-gnu-builder'='docker run $USE_TTY --rm -v "$HOME/.cargo/registry":/usr/local/cargo/registry -v "$(pwd)":/home/rust/src -w /home/rust/src -P start9/rust-arm-cross:aarch64'

echo "Building "
cd ..
rust-gnu-builder sh -c "(cd libs/ && cargo build -p snapshot_creator --release --target=aarch64-unknown-linux-gnu)"
cd -

echo "Creating Arm v8 Snapshot"
# Quote "$(pwd)" so a working directory containing spaces does not split the mount spec.
docker run $USE_TTY --platform linux/arm64/v8 --mount type=bind,src="$(pwd)",dst=/mnt arm64v8/ubuntu:22.04 /bin/sh -c "cd /mnt && /mnt/target/aarch64-unknown-linux-gnu/release/snapshot_creator"

# The containers run as root; restore ownership of everything they touched.
sudo chown -R "$USER" target
sudo chown -R "$USER" ~/.cargo
sudo chown "$USER" JS_SNAPSHOT.bin
sudo chmod 0644 JS_SNAPSHOT.bin
sudo mv -f JS_SNAPSHOT.bin ./js_engine/src/artifacts/ARM_JS_SNAPSHOT.bin

View File

@@ -1,18 +0,0 @@
#!/bin/bash
# We need to create a snapshot for the deno runtime. It wants to pull 3 files from
# build, and during the creation it gets embedded, but for some reason during the
# actual runtime it is looking for them. So this generates the snapshot for the
# host architecture.
set -e

# All paths below are relative to backend/workspace, so refuse to run elsewhere.
if [ "$0" != "./build-v8-snapshot.sh" ]; then
    >&2 echo "Must be run from backend/workspace directory"
    exit 1
fi

echo "Creating v8 Snapshot"
cargo run -p snapshot_creator --release

# cargo may have been invoked with elevated permissions previously; restore
# ownership and install the snapshot where js_engine embeds it from.
sudo chown -R "$USER" target
sudo chown -R "$USER" ~/.cargo
sudo chown "$USER" JS_SNAPSHOT.bin
sudo chmod 0644 JS_SNAPSHOT.bin
sudo mv -f JS_SNAPSHOT.bin ./js_engine/src/artifacts/JS_SNAPSHOT.bin

View File

@@ -1,39 +0,0 @@
[package]
name = "embassy_container_init"
version = "0.1.0"
edition = "2021"
# `rust` is not a recognized [package] key (cargo warns and ignores it);
# the minimum-supported-Rust-version field is `rust-version`.
rust-version = "1.66"

[features]
dev = []
metal = []
sound = []
unstable = []

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-stream = "0.3"
# cgroups-rs = "0.2"
color-eyre = "0.6"
futures = "0.3"
serde = { version = "1", features = ["derive", "rc"] }
serde_json = "1"
helpers = { path = "../helpers" }
imbl = "2"
nix = { version = "0.27", features = ["process", "signal"] }
tokio = { version = "1", features = ["full"] }
tokio-stream = { version = "0.1", features = ["io-util", "sync", "net"] }
tracing = "0.1"
tracing-error = "0.2"
tracing-futures = "0.2"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
yajrc = { version = "*", git = "https://github.com/dr-bonez/yajrc.git", branch = "develop" }

[target.'cfg(target_os = "linux")'.dependencies]
procfs = "0.15"

[profile.test]
opt-level = 3

[profile.dev.package.backtrace]
opt-level = 3

View File

@@ -1,214 +0,0 @@
use nix::unistd::Pid;
use serde::{Deserialize, Serialize, Serializer};
use yajrc::RpcMethod;
/// Identifier for a spawned child process (wraps the raw OS pid).
#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ProcessId(pub u32);
// Conversions to/from `nix::unistd::Pid`. Pids handed out by the kernel are
// non-negative, so the u32 <-> i32 casts are assumed to round-trip —
// NOTE(review): confirm nothing feeds negative sentinel pids through here.
impl From<ProcessId> for Pid {
    fn from(pid: ProcessId) -> Self {
        Pid::from_raw(pid.0 as i32)
    }
}
impl From<Pid> for ProcessId {
    fn from(pid: Pid) -> Self {
        ProcessId(pid.as_raw() as u32)
    }
}
impl From<i32> for ProcessId {
    fn from(pid: i32) -> Self {
        ProcessId(pid as u32)
    }
}
#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ProcessGroupId(pub u32);
#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[serde(rename_all = "kebab-case")]
pub enum OutputStrategy {
Inherit,
Collect,
}
#[derive(Debug, Clone, Copy)]
pub struct RunCommand;
impl Serialize for RunCommand {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(Self.as_str(), serializer)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RunCommandParams {
pub gid: Option<ProcessGroupId>,
pub command: String,
pub args: Vec<String>,
pub output: OutputStrategy,
}
impl RpcMethod for RunCommand {
type Params = RunCommandParams;
type Response = ProcessId;
fn as_str<'a>(&'a self) -> &'a str {
"command"
}
}
/// A log line tagged with its severity; each variant carries the message text.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum LogLevel {
    Trace(String),
    Warn(String),
    Error(String),
    Info(String),
    Debug(String),
}
impl LogLevel {
    /// Emit the contained message through `tracing` at this variant's level.
    ///
    /// NOTE(review): despite the name, this does NOT always log at trace level —
    /// it dispatches on the variant. Renaming to something like `log` would be
    /// clearer, but would break existing callers.
    pub fn trace(&self) {
        match self {
            LogLevel::Trace(x) => tracing::trace!("{}", x),
            LogLevel::Warn(x) => tracing::warn!("{}", x),
            LogLevel::Error(x) => tracing::error!("{}", x),
            LogLevel::Info(x) => tracing::info!("{}", x),
            LogLevel::Debug(x) => tracing::debug!("{}", x),
        }
    }
}
#[derive(Debug, Clone, Copy)]
pub struct Log;
impl Serialize for Log {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(Self.as_str(), serializer)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogParams {
pub gid: Option<ProcessGroupId>,
pub level: LogLevel,
}
impl RpcMethod for Log {
type Params = LogParams;
type Response = ();
fn as_str<'a>(&'a self) -> &'a str {
"log"
}
}
#[derive(Debug, Clone, Copy)]
pub struct ReadLineStdout;
impl Serialize for ReadLineStdout {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(Self.as_str(), serializer)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReadLineStdoutParams {
pub pid: ProcessId,
}
impl RpcMethod for ReadLineStdout {
type Params = ReadLineStdoutParams;
type Response = String;
fn as_str<'a>(&'a self) -> &'a str {
"read-line-stdout"
}
}
#[derive(Debug, Clone, Copy)]
pub struct ReadLineStderr;
impl Serialize for ReadLineStderr {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(Self.as_str(), serializer)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReadLineStderrParams {
pub pid: ProcessId,
}
impl RpcMethod for ReadLineStderr {
type Params = ReadLineStderrParams;
type Response = String;
fn as_str<'a>(&'a self) -> &'a str {
"read-line-stderr"
}
}
#[derive(Debug, Clone, Copy)]
pub struct Output;
impl Serialize for Output {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(Self.as_str(), serializer)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutputParams {
pub pid: ProcessId,
}
impl RpcMethod for Output {
type Params = OutputParams;
type Response = String;
fn as_str<'a>(&'a self) -> &'a str {
"output"
}
}
#[derive(Debug, Clone, Copy)]
pub struct SendSignal;
impl Serialize for SendSignal {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(Self.as_str(), serializer)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SendSignalParams {
pub pid: ProcessId,
pub signal: u32,
}
impl RpcMethod for SendSignal {
type Params = SendSignalParams;
type Response = ();
fn as_str<'a>(&'a self) -> &'a str {
"signal"
}
}
#[derive(Debug, Clone, Copy)]
pub struct SignalGroup;
impl Serialize for SignalGroup {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(Self.as_str(), serializer)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SignalGroupParams {
pub gid: ProcessGroupId,
pub signal: u32,
}
impl RpcMethod for SignalGroup {
type Params = SignalGroupParams;
type Response = ();
fn as_str<'a>(&'a self) -> &'a str {
"signal-group"
}
}

View File

@@ -1,428 +0,0 @@
use std::collections::BTreeMap;
use std::ops::DerefMut;
use std::os::unix::process::ExitStatusExt;
use std::process::Stdio;
use std::sync::Arc;
use embassy_container_init::{
LogParams, OutputParams, OutputStrategy, ProcessGroupId, ProcessId, RunCommandParams,
SendSignalParams, SignalGroupParams,
};
use futures::StreamExt;
use helpers::NonDetachingJoinHandle;
use nix::errno::Errno;
use nix::sys::signal::Signal;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
use tokio::process::{Child, Command};
use tokio::select;
use tokio::sync::{watch, Mutex};
use yajrc::{Id, RpcError};
/// Outputs embedded in the JSONRpc output of the executable.
/// `untagged`: the success payload is serialized bare (a pid, a line of text,
/// or nothing for the unit variants).
#[derive(Debug, Clone, Serialize)]
#[serde(untagged)]
enum Output {
    /// Pid of the process spawned by a `command` request.
    Command(ProcessId),
    ReadLineStdout(String),
    ReadLineStderr(String),
    /// Collected stdout of a finished process.
    Output(String),
    Log,
    Signal,
    SignalGroup,
}
/// RPC requests accepted on the init socket, dispatched by `method` with
/// arguments under `params` (method names are kebab-case on the wire).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "method", content = "params", rename_all = "kebab-case")]
enum Input {
    /// Run a new command, with the args
    Command(RunCommandParams),
    /// Want to log locally on the service rather than the eos
    Log(LogParams),
    // /// Get a line of stdout from the command
    // ReadLineStdout(ReadLineStdoutParams),
    // /// Get a line of stderr from the command
    // ReadLineStderr(ReadLineStderrParams),
    /// Get output of command
    Output(OutputParams),
    /// Send the sigterm to the process
    Signal(SendSignalParams),
    /// Signal a group of processes
    SignalGroup(SignalGroupParams),
}
/// One incoming JSON-RPC request: the request id plus the flattened
/// method/params pair.
#[derive(Deserialize)]
struct IncomingRpc {
    id: Id,
    #[serde(flatten)]
    input: Input,
}
/// Book-keeping for one spawned child process.
struct ChildInfo {
    /// Process group the child was assigned to, if any.
    gid: Option<ProcessGroupId>,
    /// The child handle; taken (left `None`) once its output has been awaited.
    child: Arc<Mutex<Option<Child>>>,
    /// Present only when spawned with `OutputStrategy::Inherit`.
    output: Option<InheritOutput>,
}
/// Background task forwarding a child's stdout/stderr lines to the log, plus
/// watch receivers holding the most recent line of each stream.
struct InheritOutput {
    _thread: NonDetachingJoinHandle<()>,
    stdout: watch::Receiver<String>,
    stderr: watch::Receiver<String>,
}
/// Interior state of `Handler`, guarded by a single async mutex.
struct HandlerMut {
    processes: BTreeMap<ProcessId, ChildInfo>,
    // groups: BTreeMap<ProcessGroupId, Cgroup>,
}
/// Cheaply cloneable RPC handler sharing one process table.
#[derive(Clone)]
struct Handler {
    children: Arc<Mutex<HandlerMut>>,
}
impl Handler {
fn new() -> Self {
Handler {
children: Arc::new(Mutex::new(HandlerMut {
processes: BTreeMap::new(),
// groups: BTreeMap::new(),
})),
}
}
async fn handle(&self, req: Input) -> Result<Output, RpcError> {
Ok(match req {
Input::Command(RunCommandParams {
gid,
command,
args,
output,
}) => Output::Command(self.command(gid, command, args, output).await?),
// Input::ReadLineStdout(ReadLineStdoutParams { pid }) => {
// Output::ReadLineStdout(self.read_line_stdout(pid).await?)
// }
// Input::ReadLineStderr(ReadLineStderrParams { pid }) => {
// Output::ReadLineStderr(self.read_line_stderr(pid).await?)
// }
Input::Log(LogParams { gid: _, level }) => {
level.trace();
Output::Log
}
Input::Output(OutputParams { pid }) => Output::Output(self.output(pid).await?),
Input::Signal(SendSignalParams { pid, signal }) => {
self.signal(pid, signal).await?;
Output::Signal
}
Input::SignalGroup(SignalGroupParams { gid, signal }) => {
self.signal_group(gid, signal).await?;
Output::SignalGroup
}
})
}
async fn command(
&self,
gid: Option<ProcessGroupId>,
command: String,
args: Vec<String>,
output: OutputStrategy,
) -> Result<ProcessId, RpcError> {
let mut cmd = Command::new(command);
cmd.args(args);
cmd.kill_on_drop(true);
cmd.stdout(Stdio::piped());
cmd.stderr(Stdio::piped());
let mut child = cmd.spawn().map_err(|e| {
let mut err = yajrc::INTERNAL_ERROR.clone();
err.data = Some(json!(e.to_string()));
err
})?;
let pid = ProcessId(child.id().ok_or_else(|| {
let mut err = yajrc::INTERNAL_ERROR.clone();
err.data = Some(json!("Child has no pid"));
err
})?);
let output = match output {
OutputStrategy::Inherit => {
let (stdout_send, stdout) = watch::channel(String::new());
let (stderr_send, stderr) = watch::channel(String::new());
if let (Some(child_stdout), Some(child_stderr)) =
(child.stdout.take(), child.stderr.take())
{
Some(InheritOutput {
_thread: tokio::spawn(async move {
tokio::join!(
async {
if let Err(e) = async {
let mut lines = BufReader::new(child_stdout).lines();
while let Some(line) = lines.next_line().await? {
tracing::info!("({}): {}", pid.0, line);
let _ = stdout_send.send(line);
}
Ok::<_, std::io::Error>(())
}
.await
{
tracing::error!(
"Error reading stdout of pid {}: {}",
pid.0,
e
);
}
},
async {
if let Err(e) = async {
let mut lines = BufReader::new(child_stderr).lines();
while let Some(line) = lines.next_line().await? {
tracing::warn!("({}): {}", pid.0, line);
let _ = stderr_send.send(line);
}
Ok::<_, std::io::Error>(())
}
.await
{
tracing::error!(
"Error reading stdout of pid {}: {}",
pid.0,
e
);
}
}
);
})
.into(),
stdout,
stderr,
})
} else {
None
}
}
OutputStrategy::Collect => None,
};
self.children.lock().await.processes.insert(
pid,
ChildInfo {
gid,
child: Arc::new(Mutex::new(Some(child))),
output,
},
);
Ok(pid)
}
async fn output(&self, pid: ProcessId) -> Result<String, RpcError> {
let not_found = || {
let mut err = yajrc::INTERNAL_ERROR.clone();
err.data = Some(json!(format!("Child with pid {} not found", pid.0)));
err
};
let mut child = {
self.children
.lock()
.await
.processes
.get(&pid)
.ok_or_else(not_found)?
.child
.clone()
}
.lock_owned()
.await;
if let Some(child) = child.take() {
let output = child.wait_with_output().await?;
if output.status.success() {
Ok(String::from_utf8(output.stdout).map_err(|_| yajrc::PARSE_ERROR)?)
} else {
Err(RpcError {
code: output
.status
.code()
.or_else(|| output.status.signal().map(|s| 128 + s))
.unwrap_or(0),
message: "Command failed".into(),
data: Some(json!(String::from_utf8(if output.stderr.is_empty() {
output.stdout
} else {
output.stderr
})
.map_err(|_| yajrc::PARSE_ERROR)?)),
})
}
} else {
Err(not_found())
}
}
async fn signal(&self, pid: ProcessId, signal: u32) -> Result<(), RpcError> {
let not_found = || {
let mut err = yajrc::INTERNAL_ERROR.clone();
err.data = Some(json!(format!("Child with pid {} not found", pid.0)));
err
};
Self::killall(pid, Signal::try_from(signal as i32)?)?;
if signal == 9 {
self.children
.lock()
.await
.processes
.remove(&pid)
.ok_or_else(not_found)?;
}
Ok(())
}
async fn signal_group(&self, gid: ProcessGroupId, signal: u32) -> Result<(), RpcError> {
let mut to_kill = Vec::new();
{
let mut children_ref = self.children.lock().await;
let children = std::mem::take(&mut children_ref.deref_mut().processes);
for (pid, child_info) in children {
if child_info.gid == Some(gid) {
to_kill.push(pid);
} else {
children_ref.processes.insert(pid, child_info);
}
}
}
for pid in to_kill {
tracing::info!("Killing pid {}", pid.0);
Self::killall(pid, Signal::try_from(signal as i32)?)?;
}
Ok(())
}
fn killall(pid: ProcessId, signal: Signal) -> Result<(), RpcError> {
for proc in procfs::process::all_processes()? {
let stat = proc?.stat()?;
if ProcessId::from(stat.ppid) == pid {
Self::killall(stat.pid.into(), signal)?;
}
}
if let Err(e) = nix::sys::signal::kill(pid.into(), Some(signal)) {
if e != Errno::ESRCH {
tracing::error!("Failed to kill pid {}: {}", pid.0, e);
}
}
Ok(())
}
async fn graceful_exit(self) {
let kill_all = futures::stream::iter(
std::mem::take(&mut self.children.lock().await.deref_mut().processes).into_iter(),
)
.for_each_concurrent(None, |(pid, child)| async move {
let _ = Self::killall(pid, Signal::SIGTERM);
if let Some(child) = child.child.lock().await.take() {
let _ = child.wait_with_output().await;
}
});
kill_all.await
}
}
#[tokio::main]
async fn main() {
use tokio::signal::unix::{signal, SignalKind};
let mut sigint = signal(SignalKind::interrupt()).unwrap();
let mut sigterm = signal(SignalKind::terminate()).unwrap();
let mut sigquit = signal(SignalKind::quit()).unwrap();
let mut sighangup = signal(SignalKind::hangup()).unwrap();
use tracing_error::ErrorLayer;
use tracing_subscriber::prelude::*;
use tracing_subscriber::{fmt, EnvFilter};
let filter_layer = EnvFilter::new("embassy_container_init=debug");
let fmt_layer = fmt::layer().with_target(true);
tracing_subscriber::registry()
.with(filter_layer)
.with(fmt_layer)
.with(ErrorLayer::default())
.init();
color_eyre::install().unwrap();
let handler = Handler::new();
let handler_thread = async {
let listener = tokio::net::UnixListener::bind("/start9/sockets/rpc.sock")?;
loop {
let (stream, _) = listener.accept().await?;
let (r, w) = stream.into_split();
let mut lines = BufReader::new(r).lines();
let handler = handler.clone();
tokio::spawn(async move {
let w = Arc::new(Mutex::new(w));
while let Some(line) = lines.next_line().await.transpose() {
let handler = handler.clone();
let w = w.clone();
tokio::spawn(async move {
if let Err(e) = async {
let req = serde_json::from_str::<IncomingRpc>(&line?)?;
match handler.handle(req.input).await {
Ok(output) => {
if w.lock().await.write_all(
format!("{}\n", json!({ "id": req.id, "jsonrpc": "2.0", "result": output }))
.as_bytes(),
)
.await.is_err() {
tracing::error!("Error sending to {id:?}", id = req.id);
}
}
Err(e) =>
if w
.lock()
.await
.write_all(
format!("{}\n", json!({ "id": req.id, "jsonrpc": "2.0", "error": e }))
.as_bytes(),
)
.await.is_err() {
tracing::error!("Handle + Error sending to {id:?}", id = req.id);
},
}
Ok::<_, color_eyre::Report>(())
}
.await
{
tracing::error!("Error parsing RPC request: {}", e);
tracing::debug!("{:?}", e);
}
});
}
Ok::<_, std::io::Error>(())
});
}
#[allow(unreachable_code)]
Ok::<_, std::io::Error>(())
};
select! {
res = handler_thread => {
match res {
Ok(()) => tracing::debug!("Done with inputs/outputs"),
Err(e) => {
tracing::error!("Error reading RPC input: {}", e);
tracing::debug!("{:?}", e);
}
}
},
_ = sigint.recv() => {
tracing::debug!("SIGINT");
},
_ = sigterm.recv() => {
tracing::debug!("SIGTERM");
},
_ = sigquit.recv() => {
tracing::debug!("SIGQUIT");
},
_ = sighangup.recv() => {
tracing::debug!("SIGHUP");
}
}
handler.graceful_exit().await;
::std::process::exit(0)
}

View File

@@ -1,20 +0,0 @@
[package]
name = "helpers"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-trait = "0.1.64"
color-eyre = "0.6.2"
futures = "0.3.28"
lazy_async_pool = "0.3.3"
models = { path = "../models" }
pin-project = "1.1.3"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
tokio = { version = "1", features = ["full"] }
tokio-stream = { version = "0.1.14", features = ["io-util", "sync"] }
tracing = "0.1.39"
yajrc = { version = "*", git = "https://github.com/dr-bonez/yajrc.git", branch = "develop" }

View File

@@ -1,31 +0,0 @@
use std::task::Poll;
use tokio::io::{AsyncRead, ReadBuf};
/// An `AsyncRead` adapter that substitutes every occurrence of one byte with
/// another in the data produced by the inner reader.
#[pin_project::pin_project]
pub struct ByteReplacementReader<R> {
    /// Byte to search for.
    pub replace: u8,
    /// Byte written in its place.
    pub with: u8,
    #[pin]
    pub inner: R,
}
impl<R: AsyncRead> AsyncRead for ByteReplacementReader<R> {
    fn poll_read(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> std::task::Poll<std::io::Result<()>> {
        let this = self.project();
        // Only rewrite the bytes appended by THIS poll: the caller may hand us
        // a buffer already partially filled with data from other sources, which
        // must not be altered. (The previous version scanned the entire
        // `filled()` region, clobbering pre-existing bytes.)
        let already_filled = buf.filled().len();
        match this.inner.poll_read(cx, buf) {
            Poll::Ready(Ok(())) => {
                for byte in &mut buf.filled_mut()[already_filled..] {
                    if *byte == *this.replace {
                        *byte = *this.with;
                    }
                }
                Poll::Ready(Ok(()))
            }
            other => other,
        }
    }
}

View File

@@ -1,264 +0,0 @@
use std::future::Future;
use std::ops::{Deref, DerefMut};
use std::path::{Path, PathBuf};
use std::time::Duration;
use color_eyre::eyre::{eyre, Context, Error};
use futures::future::BoxFuture;
use futures::FutureExt;
use tokio::fs::File;
use tokio::sync::oneshot;
use tokio::task::{JoinError, JoinHandle, LocalSet};
mod byte_replacement_reader;
mod os_api;
mod rpc_client;
mod rsync;
mod script_dir;
pub use byte_replacement_reader::*;
pub use os_api::*;
pub use rpc_client::{RpcClient, UnixRpcClient};
pub use rsync::*;
pub use script_dir::*;
/// Always returns `true`; useful e.g. as a `#[serde(default = "const_true")]` helper.
pub fn const_true() -> bool {
    true
}
/// Derive the sibling temp-file path used for atomic writes: `dir/name` -> `dir/.name.tmp`.
///
/// Errors if `path` has no parent or its file name is not valid UTF-8.
pub fn to_tmp_path(path: impl AsRef<Path>) -> Result<PathBuf, Error> {
    let path = path.as_ref();
    let parent = path.parent();
    let name = path.file_name().and_then(|f| f.to_str());
    match (parent, name) {
        (Some(dir), Some(name)) => Ok(dir.join(format!(".{}.tmp", name))),
        _ => Err(eyre!("invalid path: {}", path.display())),
    }
}
/// Canonicalize `path` even if it does not (fully) exist yet.
///
/// If the path itself is missing, the parent is canonicalized instead and the
/// file name re-appended; with `create_parent`, missing ancestor directories
/// are created recursively first.
pub async fn canonicalize(
    path: impl AsRef<Path> + Send + Sync,
    create_parent: bool,
) -> Result<PathBuf, Error> {
    // Recursive helper: canonicalize (creating ancestors as needed), then
    // create the directory itself. Boxed because async fns cannot recurse.
    fn create_canonical_folder<'a>(
        path: impl AsRef<Path> + Send + Sync + 'a,
    ) -> BoxFuture<'a, Result<PathBuf, Error>> {
        async move {
            let path = canonicalize(path, true).await?;
            tokio::fs::create_dir(&path)
                .await
                .with_context(|| path.display().to_string())?;
            Ok(path)
        }
        .boxed()
    }
    let path = path.as_ref();
    if tokio::fs::metadata(path).await.is_err() {
        // `path` does not exist (or is unreadable): resolve via the parent.
        if let (Some(parent), Some(file_name)) = (path.parent(), path.file_name()) {
            if create_parent && tokio::fs::metadata(parent).await.is_err() {
                return Ok(create_canonical_folder(parent).await?.join(file_name));
            } else {
                return Ok(tokio::fs::canonicalize(parent)
                    .await
                    .with_context(|| parent.display().to_string())?
                    .join(file_name));
            }
        }
    }
    // Path exists (or has no parent/file_name): canonicalize directly.
    tokio::fs::canonicalize(&path)
        .await
        .with_context(|| path.display().to_string())
}
#[pin_project::pin_project(PinnedDrop)]
pub struct NonDetachingJoinHandle<T>(#[pin] JoinHandle<T>);
impl<T> NonDetachingJoinHandle<T> {
pub async fn wait_for_abort(self) -> Result<T, JoinError> {
self.abort();
self.await
}
}
impl<T> From<JoinHandle<T>> for NonDetachingJoinHandle<T> {
fn from(t: JoinHandle<T>) -> Self {
NonDetachingJoinHandle(t)
}
}
impl<T> Deref for NonDetachingJoinHandle<T> {
type Target = JoinHandle<T>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T> DerefMut for NonDetachingJoinHandle<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
#[pin_project::pinned_drop]
impl<T> PinnedDrop for NonDetachingJoinHandle<T> {
fn drop(self: std::pin::Pin<&mut Self>) {
let this = self.project();
this.0.into_ref().get_ref().abort()
}
}
impl<T> Future for NonDetachingJoinHandle<T> {
type Output = Result<T, JoinError>;
fn poll(
self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Self::Output> {
let this = self.project();
this.0.poll(cx)
}
}
pub struct AtomicFile {
tmp_path: PathBuf,
path: PathBuf,
file: Option<File>,
}
impl AtomicFile {
pub async fn new(
path: impl AsRef<Path> + Send + Sync,
tmp_path: Option<impl AsRef<Path> + Send + Sync>,
) -> Result<Self, Error> {
let path = canonicalize(&path, true).await?;
let tmp_path = if let Some(tmp_path) = tmp_path {
canonicalize(&tmp_path, true).await?
} else {
to_tmp_path(&path)?
};
let file = File::create(&tmp_path)
.await
.with_context(|| tmp_path.display().to_string())?;
Ok(Self {
tmp_path,
path,
file: Some(file),
})
}
pub async fn rollback(mut self) -> Result<(), Error> {
drop(self.file.take());
tokio::fs::remove_file(&self.tmp_path)
.await
.with_context(|| format!("rm {}", self.tmp_path.display()))?;
Ok(())
}
pub async fn save(mut self) -> Result<(), Error> {
use tokio::io::AsyncWriteExt;
if let Some(file) = self.file.as_mut() {
file.flush().await?;
file.shutdown().await?;
file.sync_all().await?;
}
drop(self.file.take());
tokio::fs::rename(&self.tmp_path, &self.path)
.await
.with_context(|| {
format!("mv {} -> {}", self.tmp_path.display(), self.path.display())
})?;
Ok(())
}
}
impl std::ops::Deref for AtomicFile {
type Target = File;
fn deref(&self) -> &Self::Target {
self.file.as_ref().unwrap()
}
}
impl std::ops::DerefMut for AtomicFile {
fn deref_mut(&mut self) -> &mut Self::Target {
self.file.as_mut().unwrap()
}
}
impl Drop for AtomicFile {
    /// Best-effort cleanup: if neither `save` nor `rollback` consumed the file,
    /// close it and delete the temp file in the background.
    fn drop(&mut self) {
        if let Some(file) = self.file.take() {
            drop(file);
            let path = std::mem::take(&mut self.tmp_path);
            // Ignore removal errors: the previous `.unwrap()` would panic
            // inside a detached task (e.g. if the file was already removed
            // externally), and there is nothing useful to do about a failed
            // cleanup of a dot-tmp file.
            tokio::spawn(async move {
                let _ = tokio::fs::remove_file(path).await;
            });
        }
    }
}
pub struct TimedResource<T: 'static + Send> {
handle: NonDetachingJoinHandle<Option<T>>,
ready: oneshot::Sender<()>,
}
impl<T: 'static + Send> TimedResource<T> {
    /// Hold `resource` for at most `timer`; if unclaimed, it is simply dropped.
    ///
    /// Implemented in terms of [`Self::new_with_destructor`] (the two
    /// constructors previously duplicated the entire select loop).
    pub fn new(resource: T, timer: Duration) -> Self {
        Self::new_with_destructor(resource, timer, |resource| async move {
            drop(resource);
        })
    }
    /// Hold `resource` for at most `timer`; if still unclaimed when the timer
    /// fires, run `destructor` on it for custom async cleanup.
    pub fn new_with_destructor<
        Fn: FnOnce(T) -> Fut + Send + 'static,
        Fut: Future<Output = ()> + Send,
    >(
        resource: T,
        timer: Duration,
        destructor: Fn,
    ) -> Self {
        let (send, recv) = oneshot::channel();
        let handle = tokio::spawn(async move {
            tokio::select! {
                // Timeout wins: destroy the resource and report it gone.
                _ = tokio::time::sleep(timer) => {
                    destructor(resource).await;
                    None
                },
                // get() signalled (or the sender was dropped) before the timeout.
                _ = recv => Some(resource),
            }
        });
        Self {
            handle: handle.into(),
            ready: send,
        }
    }
    /// Claim the resource. Returns `None` if the timeout already destroyed it.
    pub async fn get(self) -> Option<T> {
        // A send error only means the task already finished; awaiting the
        // handle below still yields the correct answer either way.
        let _ = self.ready.send(());
        self.handle.await.unwrap()
    }
    /// `true` once the timeout has fired (the task's receiver was dropped,
    /// closing the channel).
    pub fn is_timed_out(&self) -> bool {
        self.ready.is_closed()
    }
}
/// Run a `!Send` future on a dedicated single-threaded tokio runtime.
///
/// Spawns an OS thread with its own current-thread runtime and `LocalSet`,
/// schedules `fut()` on it, and returns a join handle for the result. The
/// worker thread exits once its `LocalSet` drains.
pub async fn spawn_local<
    T: 'static + Send,
    F: FnOnce() -> Fut + Send + 'static,
    Fut: Future<Output = T> + 'static,
>(
    fut: F,
) -> NonDetachingJoinHandle<T> {
    let (send, recv) = tokio::sync::oneshot::channel();
    std::thread::spawn(move || {
        tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .unwrap()
            .block_on(async move {
                let set = LocalSet::new();
                // Hand the join handle back to the caller before driving the set.
                send.send(set.spawn_local(fut()).into())
                    .unwrap_or_else(|_| unreachable!())
                set.await
            })
    });
    // The sender only drops after a successful send, so this can only fail if
    // runtime construction panicked on the worker thread.
    recv.await.unwrap()
}

View File

@@ -1,83 +0,0 @@
use std::sync::Arc;
use color_eyre::eyre::eyre;
use color_eyre::Report;
use models::InterfaceId;
use models::PackageId;
use serde_json::Value;
use tokio::sync::mpsc;
/// Error type: the JS runtime's event loop has shut down, so callbacks can no
/// longer be delivered.
pub struct RuntimeDropped;
/// Handle for invoking a registered JS callback by id over a channel into the
/// runtime's event loop.
pub struct Callback {
    // Identifier of the JS-side callback (shared so each call avoids a copy).
    id: Arc<String>,
    // Channel into the runtime; each message is (callback id, args).
    sender: mpsc::UnboundedSender<(Arc<String>, Vec<Value>)>,
}
impl Callback {
    /// Create a handle for the JS callback registered under `id`, delivering
    /// invocations through `sender`.
    pub fn new(id: String, sender: mpsc::UnboundedSender<(Arc<String>, Vec<Value>)>) -> Self {
        Self {
            id: Arc::new(id),
            sender,
        }
    }
    /// Whether the runtime can still receive invocations.
    ///
    /// BUGFIX: previously returned `self.sender.is_closed()` directly, which
    /// is `true` exactly when the runtime's receiver is *gone* — the opposite
    /// of "listening".
    pub fn is_listening(&self) -> bool {
        !self.sender.is_closed()
    }
    /// Queue an invocation of the callback with `args`; fails only if the
    /// runtime's receiving end has been dropped.
    pub fn call(&self, args: Vec<Value>) -> Result<(), RuntimeDropped> {
        self.sender
            .send((self.id.clone(), args))
            .map_err(|_| RuntimeDropped)
    }
}
/// Request to bind an interface as a Tor onion service at `external_port`.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AddressSchemaOnion {
    pub id: InterfaceId,
    pub external_port: u16,
}
/// Request to bind an interface on the LAN at `external_port`.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AddressSchemaLocal {
    pub id: InterfaceId,
    pub external_port: u16,
}
/// A bound address as a plain string; format is determined by the binder
/// (see `OsApi::bind_local` / `OsApi::bind_onion`).
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Address(pub String);
/// Placeholder marker type; carries no data yet.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Domain;
/// Placeholder marker type; carries no data yet.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Name;
/// Host-side API exposed to the embedded JS runtime: config access, network
/// binding, and service lifecycle control.
#[async_trait::async_trait]
#[allow(unused_variables)]
pub trait OsApi: Send + Sync + 'static {
    /// Read config values for package `id` at `path`; when `callback` is
    /// provided it is registered to be invoked on subsequent changes.
    async fn get_service_config(
        &self,
        id: PackageId,
        path: &str,
        callback: Option<Callback>,
    ) -> Result<Vec<Value>, Report>;
    /// Bind `internal_port` on the LAN per `address_schema`; returns the
    /// resulting address.
    async fn bind_local(
        &self,
        internal_port: u16,
        address_schema: AddressSchemaLocal,
    ) -> Result<Address, Report>;
    /// Bind `internal_port` as a Tor onion service per `address_schema`.
    async fn bind_onion(
        &self,
        internal_port: u16,
        address_schema: AddressSchemaOnion,
    ) -> Result<Address, Report>;
    /// Release a LAN binding previously created for `id` at `external`.
    async fn unbind_local(&self, id: InterfaceId, external: u16) -> Result<(), Report>;
    /// Release an onion binding previously created for `id` at `external`.
    async fn unbind_onion(&self, id: InterfaceId, external: u16) -> Result<(), Report>;
    /// Mark the service as started (synchronous notification).
    fn set_started(&self) -> Result<(), Report>;
    /// Restart this service.
    async fn restart(&self) -> Result<(), Report>;
    /// Start this service.
    async fn start(&self) -> Result<(), Report>;
    /// Stop this service.
    async fn stop(&self) -> Result<(), Report>;
}

View File

@@ -1,192 +0,0 @@
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::atomic::AtomicUsize;
use std::sync::{Arc, Weak};
use futures::future::BoxFuture;
use futures::{FutureExt, TryFutureExt};
use lazy_async_pool::Pool;
use models::{Error, ErrorKind, ResultExt};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use tokio::io::{AsyncBufReadExt, AsyncRead, AsyncWrite, AsyncWriteExt, BufReader};
use tokio::net::UnixStream;
use tokio::runtime::Handle;
use tokio::sync::{oneshot, Mutex};
use yajrc::{Id, RpcError, RpcMethod, RpcRequest, RpcResponse};
use crate::NonDetachingJoinHandle;
// Boxed writer half of the RPC transport.
type DynWrite = Box<dyn AsyncWrite + Unpin + Send + Sync + 'static>;
// Pending requests: request id -> oneshot delivering that request's result.
type ResponseMap = BTreeMap<Id, oneshot::Sender<Result<Value, RpcError>>>;
// Max attempts for UnixRpcClient::request when a connection reports an
// internal error.
const MAX_TRIES: u64 = 3;
/// JSON-RPC client over an arbitrary async byte stream (newline-delimited
/// JSON messages).
pub struct RpcClient {
    // Shared counter used to mint unique request ids.
    id: Arc<AtomicUsize>,
    // Background task that reads and dispatches responses.
    _handler: NonDetachingJoinHandle<()>,
    writer: DynWrite,
    // Weak: the handler task owns the map, so this dangles once it exits —
    // which is how `request` detects a dead connection.
    responses: Weak<Mutex<ResponseMap>>,
}
impl RpcClient {
pub fn new<
W: AsyncWrite + Unpin + Send + Sync + 'static,
R: AsyncRead + Unpin + Send + Sync + 'static,
>(
writer: W,
reader: R,
id: Arc<AtomicUsize>,
) -> Self {
let writer: DynWrite = Box::new(writer);
let responses = Arc::new(Mutex::new(ResponseMap::new()));
let weak_responses = Arc::downgrade(&responses);
RpcClient {
id,
_handler: tokio::spawn(async move {
let mut lines = BufReader::new(reader).lines();
while let Some(line) = lines.next_line().await.transpose() {
match line.map_err(Error::from).and_then(|l| {
serde_json::from_str::<RpcResponse>(&l)
.with_kind(ErrorKind::Deserialization)
}) {
Ok(l) => {
if let Some(id) = l.id {
if let Some(res) = responses.lock().await.remove(&id) {
if let Err(e) = res.send(l.result) {
tracing::warn!(
"RpcClient Response for Unknown ID: {:?}",
e
);
}
} else {
tracing::warn!(
"RpcClient Response for Unknown ID: {:?}",
l.result
);
}
} else {
tracing::info!("RpcClient Notification: {:?}", l);
}
}
Err(e) => {
tracing::error!("RpcClient Error: {}", e);
tracing::debug!("{:?}", e);
}
}
}
})
.into(),
writer,
responses: weak_responses,
}
}
pub async fn request<T: RpcMethod>(
&mut self,
method: T,
params: T::Params,
) -> Result<T::Response, RpcError>
where
T: Serialize,
T::Params: Serialize,
T::Response: for<'de> Deserialize<'de>,
{
let id = Id::Number(
self.id
.fetch_add(1, std::sync::atomic::Ordering::SeqCst)
.into(),
);
let request = RpcRequest {
id: Some(id.clone()),
method,
params,
};
if let Some(w) = self.responses.upgrade() {
let (send, recv) = oneshot::channel();
w.lock().await.insert(id.clone(), send);
self.writer
.write_all((serde_json::to_string(&request)? + "\n").as_bytes())
.await
.map_err(|e| {
let mut err = yajrc::INTERNAL_ERROR.clone();
err.data = Some(json!(e.to_string()));
err
})?;
match recv.await {
Ok(val) => {
return Ok(serde_json::from_value(val?)?);
}
Err(_err) => {
tokio::task::yield_now().await;
}
}
}
tracing::debug!(
"Client has finished {:?}",
futures::poll!(&mut self._handler)
);
let mut err = yajrc::INTERNAL_ERROR.clone();
err.data = Some(json!("RpcClient thread has terminated"));
Err(err)
}
}
/// Pooled JSON-RPC client over a unix domain socket; broken connections are
/// discarded and recreated on demand by the pool's factory closure.
pub struct UnixRpcClient {
    pool: Pool<
        RpcClient,
        Box<dyn Fn() -> BoxFuture<'static, Result<RpcClient, std::io::Error>> + Send + Sync>,
        BoxFuture<'static, Result<RpcClient, std::io::Error>>,
        std::io::Error,
    >,
}
impl UnixRpcClient {
    /// Create a lazily-connecting client pool for the unix socket at `path`.
    /// The pool starts empty (size 0); connections are made on demand.
    pub fn new(path: PathBuf) -> Self {
        let rt = Handle::current();
        let id = Arc::new(AtomicUsize::new(0));
        Self {
            pool: Pool::new(
                0,
                Box::new(move || {
                    let path = path.clone();
                    let id = id.clone();
                    // Connect on the captured runtime handle so the factory
                    // works regardless of the calling context.
                    rt.spawn(async move {
                        let (r, w) = UnixStream::connect(&path).await?.into_split();
                        Ok(RpcClient::new(w, r, id))
                    })
                    .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
                    .and_then(|x| async move { x })
                    .boxed()
                }),
            ),
        }
    }
    /// Send a request on a pooled connection. A response with the internal
    /// error code is treated as a broken connection: the client is destroyed
    /// and the request retried, up to `MAX_TRIES` additional attempts.
    pub async fn request<T: RpcMethod>(
        &self,
        method: T,
        params: T::Params,
    ) -> Result<T::Response, RpcError>
    where
        T: Serialize + Clone,
        T::Params: Serialize + Clone,
        T::Response: for<'de> Deserialize<'de>,
    {
        let mut tries = 0;
        let res = loop {
            tries += 1;
            let mut client = self.pool.clone().get().await?;
            let res = client.request(method.clone(), params.clone()).await;
            match &res {
                Err(e) if e.code == yajrc::INTERNAL_ERROR.code => {
                    // Drop this connection from the pool before retrying.
                    client.destroy();
                }
                _ => break res,
            }
            if tries > MAX_TRIES {
                tracing::warn!("Max Tries exceeded");
                break res;
            }
        };
        res
    }
}

View File

@@ -1,227 +0,0 @@
use std::path::Path;
use color_eyre::eyre::eyre;
use futures::StreamExt;
use models::{Error, ErrorKind};
use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use tokio::process::{Child, Command};
use tokio::sync::watch;
use tokio_stream::wrappers::WatchStream;
use crate::{const_true, ByteReplacementReader, NonDetachingJoinHandle};
/// Options controlling the generated `rsync` command line.
///
/// NOTE(review): when deserialized, `delete`, `force`, `no_permissions`, and
/// `no_owner` all default to `true` (`const_true`), but the manual `Default`
/// impl leaves `no_permissions`/`no_owner` as `false` — confirm which is
/// intended.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RsyncOptions {
    /// `--delete`: remove files at dst that no longer exist at src.
    #[serde(default = "const_true")]
    pub delete: bool,
    /// `--force`: allow deletion of directories even when non-empty.
    #[serde(default = "const_true")]
    pub force: bool,
    /// `--ignore-existing`: skip files already present at dst.
    #[serde(default)]
    pub ignore_existing: bool,
    /// Patterns passed as `--exclude=<pattern>`.
    #[serde(default)]
    pub exclude: Vec<String>,
    /// `--no-perms`: do not preserve permissions.
    #[serde(default = "const_true")]
    pub no_permissions: bool,
    /// `--no-owner`: do not preserve ownership.
    #[serde(default = "const_true")]
    pub no_owner: bool,
}
impl Default for RsyncOptions {
    // NOTE(review): `no_permissions`/`no_owner` default to `false` here but
    // to `true` when deserialized (their serde defaults are `const_true`) —
    // likely an oversight; confirm the intended behavior before unifying.
    fn default() -> Self {
        Self {
            delete: true,
            force: true,
            ignore_existing: false,
            exclude: Vec::new(),
            no_permissions: false,
            no_owner: false,
        }
    }
}
/// Handle to a running `rsync` child process with live progress reporting.
pub struct Rsync {
    /// The spawned rsync child (spawned with `kill_on_drop`).
    pub command: Child,
    // Task parsing stdout progress lines into the watch channel.
    _progress_task: NonDetachingJoinHandle<Result<(), Error>>,
    // Task buffering stderr so `wait()` can report it.
    stderr: NonDetachingJoinHandle<Result<String, Error>>,
    /// Stream of fractional progress values parsed from `--info=progress2`.
    pub progress: WatchStream<f64>,
}
impl Rsync {
pub async fn new(
src: impl AsRef<Path>,
dst: impl AsRef<Path>,
options: RsyncOptions,
) -> Result<Self, Error> {
let mut cmd = Command::new("rsync");
if options.delete {
cmd.arg("--delete");
}
if options.force {
cmd.arg("--force");
}
if options.ignore_existing {
cmd.arg("--ignore-existing");
}
if options.no_permissions {
cmd.arg("--no-perms");
}
if options.no_owner {
cmd.arg("--no-owner");
}
for exclude in options.exclude {
cmd.arg(format!("--exclude={}", exclude));
}
if options.no_permissions {
cmd.arg("--no-perms");
}
let mut command = cmd
.arg("-actAXH")
.arg("--info=progress2")
.arg("--no-inc-recursive")
.arg(src.as_ref())
.arg(dst.as_ref())
.kill_on_drop(true)
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped())
.spawn()?;
let cmd_stdout = match command.stdout.take() {
None => {
return Err(Error::new(
eyre!("rsync command stdout is none"),
ErrorKind::Filesystem,
))
}
Some(a) => a,
};
let mut cmd_stderr = match command.stderr.take() {
None => {
return Err(Error::new(
eyre!("rsync command stderr is none"),
ErrorKind::Filesystem,
))
}
Some(a) => a,
};
let (send, recv) = watch::channel(0.0);
let stderr = tokio::spawn(async move {
let mut res = String::new();
cmd_stderr.read_to_string(&mut res).await?;
Ok(res)
})
.into();
let progress_task = tokio::spawn(async move {
let mut lines = BufReader::new(ByteReplacementReader {
replace: b'\r',
with: b'\n',
inner: cmd_stdout,
})
.lines();
while let Some(line) = lines.next_line().await? {
if let Some(percentage) = parse_percentage(&line) {
if percentage > 0.0 {
if let Err(err) = send.send(percentage / 100.0) {
return Err(Error::new(
eyre!("rsync progress send error: {}", err),
ErrorKind::Filesystem,
));
}
}
}
}
Ok(())
})
.into();
let mut progress = WatchStream::new(recv);
progress.next().await;
Ok(Rsync {
command,
_progress_task: progress_task,
stderr,
progress,
})
}
pub async fn wait(mut self) -> Result<(), Error> {
let status = self.command.wait().await?;
let stderr = match self.stderr.await {
Err(err) => {
return Err(Error::new(
eyre!("rsync stderr error: {}", err),
ErrorKind::Filesystem,
))
}
Ok(a) => a?,
};
if status.success() {
tracing::info!("rsync: {}", stderr);
} else {
return Err(Error::new(
eyre!(
"rsync error: {} {} ",
status.code().map(|x| x.to_string()).unwrap_or_default(),
stderr
),
ErrorKind::Filesystem,
));
}
Ok(())
}
}
/// Extract a progress percentage from a line of `rsync --info=progress2`
/// output: the first whitespace-separated token ending in '%', parsed as f64.
fn parse_percentage(line: &str) -> Option<f64> {
    let token = line
        .split_ascii_whitespace()
        .find_map(|col| col.strip_suffix('%'))?;
    token.parse().ok()
}
// Sanity check against a real `--info=progress2` sample line.
#[test]
fn test_parse() {
    let input = " 1.07G 57% 95.20MB/s 0:00:10 (xfr#1, to-chk=0/2)";
    assert_eq!(Some(57.0), parse_percentage(input));
}
// End-to-end smoke test: copies 100 random ~100KB files between temp dirs
// and asserts the progress stream reports an early value, at least one
// intermediate value, and a final value.
#[tokio::test]
async fn test_rsync() {
    use futures::StreamExt;
    use tokio::fs;
    let mut seen_zero = false;
    let mut seen_in_between = false;
    let mut seen_hundred = false;
    fs::remove_dir_all("/tmp/test_rsync")
        .await
        .unwrap_or_default();
    fs::create_dir_all("/tmp/test_rsync/a").await.unwrap();
    fs::create_dir_all("/tmp/test_rsync/b").await.unwrap();
    for i in 0..100 {
        // ~100KB of random data per file so the copy is slow enough to
        // observe intermediate progress.
        tokio::io::copy(
            &mut fs::File::open("/dev/urandom").await.unwrap().take(100_000),
            &mut fs::File::create(format!("/tmp/test_rsync/a/sample.{i}.bin"))
                .await
                .unwrap(),
        )
        .await
        .unwrap();
    }
    let mut rsync = Rsync::new(
        "/tmp/test_rsync/a/",
        "/tmp/test_rsync/b/",
        Default::default(),
    )
    .await
    .unwrap();
    while let Some(progress) = rsync.progress.next().await {
        if progress <= 0.05 {
            seen_zero = true;
        } else if progress > 0.05 && progress < 1.0 {
            seen_in_between = true
        } else {
            seen_hundred = true;
        }
    }
    rsync.wait().await.unwrap();
    assert!(seen_zero, "seen zero");
    assert!(seen_in_between, "seen in between 0 and 100");
    assert!(seen_hundred, "seen 100");
}

View File

@@ -1,13 +0,0 @@
use std::path::{Path, PathBuf};
use models::{PackageId, Version};
/// Root directory (relative to the data dir) where package scripts are stored.
pub const PKG_SCRIPT_DIR: &str = "package-data/scripts";
/// Directory holding a package's scripts for a specific version:
/// `<datadir>/package-data/scripts/<pkg_id>/<version>`.
pub fn script_dir<P: AsRef<Path>>(datadir: P, pkg_id: &PackageId, version: &Version) -> PathBuf {
    datadir
        .as_ref()
        // `&*PKG_SCRIPT_DIR` reborrow removed: the const is already a `&str`.
        .join(PKG_SCRIPT_DIR)
        .join(pkg_id)
        .join(version.as_str())
}

View File

@@ -1,24 +0,0 @@
[package]
name = "js_engine"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-trait = "0.1.74"
dashmap = "5.5.3"
deno_core = "=0.222.0"
deno_ast = { version = "=0.29.5", features = ["transpiling"] }
embassy_container_init = { path = "../embassy_container_init" }
reqwest = { version = "0.11.22" }
sha2 = "0.10.8"
itertools = "0.11.0"
lazy_static = "1.4.0"
models = { path = "../models" }
helpers = { path = "../helpers" }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
tokio = { version = "1", features = ["full"] }
tracing = "0.1"
pin-project = "1"

View File

@@ -1,369 +0,0 @@
import Deno from "/deno_global.js";
import * as mainModule from "/embassy.js";
/** Default-parameter guard: throws when a required option is omitted. */
function requireParam(param) {
  throw new Error(`Missing required parameter ${param}`);
}
/** Generates unique-ish callback ids of the form "callback<counter><random>". */
const callbackName = (() => {
  let counter = 0;
  return () => {
    const salt = Math.floor(Math.random() * 100000);
    return `callback${counter++}${salt}`;
  };
})();
/** Registry mapping generated ids to their JS callback functions. */
const callbackMapping = {};
/** Store `fn` under a fresh id and return that id. */
const registerCallback = (fn) => {
  const id = callbackName(); // TODO
  callbackMapping[id] = fn;
  return id;
};
/**
 * Resolve a simplified JSON pointer (no escapes; array indices are plain
 * path segments) against `obj`.
 * @param {object} obj
 * @param {string} pointer e.g. "/a/b/0"
 * @returns the value at the pointer, or null when a segment is missing
 */
function jsonPointerValue(obj, pointer) {
  let current = obj;
  for (const segment of pointer.substring(1).split("/")) {
    if (current == null) {
      return null;
    }
    current = (current || {})[segment];
  }
  return current;
}
/** Convert a truthy timestamp-ish value into a Date; pass falsy values through. */
function maybeDate(value) {
  return value ? new Date(value) : value;
}
// Write `toWrite` to `path` inside the given volume (host-side op).
const writeFile = (
  {
    path = requireParam("path"),
    volumeId = requireParam("volumeId"),
    toWrite = requireParam("toWrite"),
  } = requireParam("options"),
) => Deno.core.opAsync("write_file", volumeId, path, toWrite);
// Read the file at `path` in the given volume; resolves with its contents.
const readFile = (
  { volumeId = requireParam("volumeId"), path = requireParam("path") } =
    requireParam("options"),
) => Deno.core.opAsync("read_file", volumeId, path);
// Spawn a long-running child process (stdio inherited, no timeout). Returns a
// handle with the process id plus wait/term helpers.
const runDaemon = (
  { command = requireParam("command"), args = [] } = requireParam("options"),
) => {
  let id = Deno.core.opAsync("start_command", command, args, "inherit", null);
  let processId = id.then((x) => x.processId);
  let waitPromise = null;
  return {
    processId,
    // Await process exit; memoized so repeated calls share one wait op.
    async wait() {
      waitPromise = waitPromise ||
        Deno.core.opAsync("wait_command", await processId);
      return waitPromise;
    },
    // BUGFIX: forward the requested signal — previously 15 (SIGTERM) was
    // hardcoded and the `signal` argument was ignored.
    async term(signal = 15) {
      return Deno.core.opAsync("send_signal", await processId, signal);
    },
  };
};
// Run a short-lived command in "collect" mode; timeoutMillis (default 30s) is
// forwarded to the host op (presumably a kill deadline — confirm host-side).
// Resolves with the wait_command result.
const runCommand = async (
  { command = requireParam("command"), args = [], timeoutMillis = 30000 } =
    requireParam("options"),
) => {
  let id = Deno.core.opAsync(
    "start_command",
    command,
    args,
    "collect",
    timeoutMillis,
  );
  let pid = id.then((x) => x.processId);
  return Deno.core.opAsync("wait_command", await pid);
};
// Bind `internalPort` on the LAN for interface `name` at `externalPort`.
const bindLocal = async (
  {
    internalPort = requireParam("internalPort"),
    name = requireParam("name"),
    externalPort = requireParam("externalPort"),
  } = requireParam("options"),
) => {
  return Deno.core.opAsync("bind_local", internalPort, {
    id: name,
    externalPort,
  });
};
// Bind `internalPort` as a Tor onion service for interface `name`.
const bindTor = async (
  {
    internalPort = requireParam("internalPort"),
    name = requireParam("name"),
    externalPort = requireParam("externalPort"),
  } = requireParam("options"),
) => {
  return Deno.core.opAsync("bind_onion", internalPort, {
    id: name,
    externalPort,
  });
};
// Send `signal` to the process group `gid` via the host.
const signalGroup = async (
  { gid = requireParam("gid"), signal = requireParam("signal") } = requireParam(
    "gid and signal",
  ),
) => {
  return Deno.core.opAsync("signal_group", gid, signal);
};
// Async sleep implemented host-side.
const sleep = (timeMs = requireParam("timeMs")) =>
  Deno.core.opAsync("sleep", timeMs);
// Move/rename a file, possibly across volumes (host-side op).
const rename = (
  {
    srcVolume = requireParam("srcVolume"),
    // BUGFIX: was `requirePapram` (typo) — omitting dstVolume would have
    // thrown a ReferenceError instead of the intended missing-parameter error.
    dstVolume = requireParam("dstVolume"),
    srcPath = requireParam("srcPath"),
    dstPath = requireParam("dstPath"),
  } = requireParam("options"),
) => Deno.core.opAsync("rename", srcVolume, srcPath, dstVolume, dstPath);
// Stat a path in a volume; timestamp fields are revived into Date objects
// (falsy timestamps pass through unchanged).
const metadata = async (
  { volumeId = requireParam("volumeId"), path = requireParam("path") } =
    requireParam("options"),
) => {
  const data = await Deno.core.opAsync("metadata", volumeId, path);
  return {
    ...data,
    modified: maybeDate(data.modified),
    created: maybeDate(data.created),
    accessed: maybeDate(data.accessed),
  };
};
// Delete a file in a volume.
const removeFile = (
  { volumeId = requireParam("volumeId"), path = requireParam("path") } =
    requireParam("options"),
) => Deno.core.opAsync("remove_file", volumeId, path);
// True when running under the host's sandboxed evaluation mode.
const isSandboxed = () => Deno.core.ops["is_sandboxed"]();
// Serialize `toWrite` as JSON and write it to the given volume path.
const writeJsonFile = (
  {
    volumeId = requireParam("volumeId"),
    path = requireParam("path"),
    toWrite = requireParam("toWrite"),
  } = requireParam("options"),
) =>
  writeFile({
    volumeId,
    path,
    toWrite: JSON.stringify(toWrite),
  });
// Change ownership (uid) of a path inside a volume.
const chown = async (
  {
    volumeId = requireParam("volumeId"),
    path = requireParam("path"),
    uid = requireParam("uid"),
  } = requireParam("options"),
) => {
  return await Deno.core.opAsync("chown", volumeId, path, uid);
};
// Change the mode of a path inside a volume.
const chmod = async (
  {
    volumeId = requireParam("volumeId"),
    path = requireParam("path"),
    mode = requireParam("mode"),
  } = requireParam("options"),
) => {
  return await Deno.core.opAsync("chmod", volumeId, path, mode);
};
// Read a file from a volume and parse its contents as JSON.
const readJsonFile = async (
  { volumeId = requireParam("volumeId"), path = requireParam("path") } =
    requireParam("options"),
) => JSON.parse(await readFile({ volumeId, path }));
// Directory operations inside a volume: mkdir / readdir / rmdir.
const createDir = (
  { volumeId = requireParam("volumeId"), path = requireParam("path") } =
    requireParam("options"),
) => Deno.core.opAsync("create_dir", volumeId, path);
const readDir = (
  { volumeId = requireParam("volumeId"), path = requireParam("path") } =
    requireParam("options"),
) => Deno.core.opAsync("read_dir", volumeId, path);
const removeDir = (
  { volumeId = requireParam("volumeId"), path = requireParam("path") } =
    requireParam("options"),
) => Deno.core.opAsync("remove_dir", volumeId, path);
// Structured logging through the host, one helper per level.
const trace = (whatToTrace = requireParam("whatToTrace")) =>
  Deno.core.opAsync("log_trace", whatToTrace);
const warn = (whatToTrace = requireParam("whatToTrace")) =>
  Deno.core.opAsync("log_warn", whatToTrace);
const error = (whatToTrace = requireParam("whatToTrace")) =>
  Deno.core.opAsync("log_error", whatToTrace);
const debug = (whatToTrace = requireParam("whatToTrace")) =>
  Deno.core.opAsync("log_debug", whatToTrace);
const info = (whatToTrace = requireParam("whatToTrace")) =>
  Deno.core.opAsync("log_info", whatToTrace);
// Minimal fetch shim over the host op: the whole body arrives pre-buffered as
// text, so text()/json() both resolve from the same cached value.
const fetch = async (url = requireParam("url"), options = null) => {
  const { body, ...response } = await Deno.core.opAsync("fetch", url, options);
  const textValue = Promise.resolve(body);
  return {
    ...response,
    text() {
      return textValue;
    },
    json() {
      return textValue.then((x) => JSON.parse(x));
    },
  };
};
// Start a host-side rsync between two volumes; returns a handle exposing the
// job id, completion, and progress polling.
const runRsync = (
  {
    srcVolume = requireParam("srcVolume"),
    dstVolume = requireParam("dstVolume"),
    srcPath = requireParam("srcPath"),
    dstPath = requireParam("dstPath"),
    options = requireParam("options"),
  } = requireParam("options"),
) => {
  let id = Deno.core.opAsync(
    "rsync",
    srcVolume,
    srcPath,
    dstVolume,
    dstPath,
    options,
  );
  let waitPromise = null;
  return {
    async id() {
      return id;
    },
    // Await completion; memoized so repeated calls share one wait op.
    async wait() {
      waitPromise = waitPromise || Deno.core.opAsync("rsync_wait", await id);
      return waitPromise;
    },
    // Poll the job's current progress.
    async progress() {
      return Deno.core.opAsync("rsync_progress", await id);
    },
  };
};
// Entry point the host uses to dispatch a registered callback by its id.
globalThis.runCallback = (uuid, args) => callbackMapping[uuid](...args);
// Fetch another service's config at configPath; onChange is registered as a
// callback id so the host can notify on later changes.
const getServiceConfig = async (
  {
    serviceId = requireParam("serviceId"),
    configPath = requireParam("configPath"),
    onChange = requireParam("onChange"),
  } = requireParam("options"),
) => {
  return await Deno.core.opAsync(
    "get_service_config",
    serviceId,
    configPath,
    registerCallback(onChange),
  );
};
// Service lifecycle signals to the host.
const started = () => Deno.core.ops.set_started();
const restart = () => Deno.core.opAsync("restart");
const start = () => Deno.core.opAsync("start");
const stop = () => Deno.core.opAsync("stop");
// Execution context injected by the host runtime for this invocation.
const currentFunction = Deno.core.ops.current_function();
const input = Deno.core.ops.get_input();
const variable_args = Deno.core.ops.get_variable_args();
// Report the final result of this invocation back to the host.
const setState = (x) => Deno.core.ops.set_value(x);
// The `effects` API surface handed to package scripts.
//
// Fix: duplicate keys removed (`getServiceConfig`, `restart`, `start`, and
// `stop` each appeared twice; duplicate keys in an object literal are
// redundant — the later entry silently wins, and the values were identical).
const effects = {
  bindLocal,
  bindTor,
  chmod,
  chown,
  createDir,
  debug,
  // NOTE(review): no `diskUsage` definition is visible in this file — confirm
  // it is declared in the portion not shown, else this throws at module load.
  diskUsage,
  error,
  fetch,
  getServiceConfig,
  info,
  isSandboxed,
  metadata,
  readDir,
  readFile,
  readJsonFile,
  removeDir,
  removeFile,
  rename,
  restart,
  runCommand,
  runDaemon,
  runRsync,
  signalGroup,
  sleep,
  start,
  stop,
  trace,
  warn,
  writeFile,
  writeJsonFile,
};
// Extra named arguments injected per entry point (used by the API v1 call shape).
const fnSpecificArgs = {
  main: { started },
};
// Built-in fallback implementations for entry points a package may omit.
const defaults = {
  handleSignal: (effects, { gid, signal }) => {
    return effects.signalGroup({ gid, signal });
  },
};
/** Run `fn` and return its result; on any throw, return `orValue` instead. */
function safeToString(fn, orValue = "") {
  try {
    return fn();
  } catch {
    return orValue;
  }
}
// API version exported by the package module (0 = legacy positional-args shape).
const apiVersion = mainModule?.version || defaults?.version || 0;
// Resolve the requested entry point from the package module, falling back to
// the built-in defaults (e.g. handleSignal).
const runFunction = jsonPointerValue(mainModule, currentFunction) ||
  jsonPointerValue(defaults, currentFunction);
const extraArgs = jsonPointerValue(fnSpecificArgs, currentFunction) || {};
(async () => {
  const answer = await (async () => {
    if (typeof runFunction !== "function") {
      error(`Expecting ${currentFunction} to be a function`);
      throw new Error(`Expecting ${currentFunction} to be a function`);
    }
  })()
    .then(() => {
      // Dispatch according to the module's declared API version.
      switch (apiVersion) {
        case 0:
          return runFunction(effects, input, ...variable_args);
        case 1:
          return runFunction({
            effects,
            input,
            args: variable_args,
            ...extraArgs,
          });
        default:
          return { error: `Unknown API version ${apiVersion}` };
      }
    })
    .catch((e) => {
      // Pass structured errors through unchanged; stringify anything else.
      if ("error" in e) return e;
      if ("error-code" in e) return e;
      return {
        error: safeToString(
          () => e.toString(),
          "Error Not able to be stringified",
        ),
      };
    });
  // Hand the final result back to the host.
  await setState(answer);
})();

File diff suppressed because it is too large Load Diff

View File

@@ -1,38 +0,0 @@
[package]
name = "models"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
base64 = "0.21.4"
color-eyre = "0.6.2"
ed25519-dalek = { version = "2.0.0", features = ["serde"] }
lazy_static = "1.4"
mbrman = "0.5.2"
emver = { version = "0.1", git = "https://github.com/Start9Labs/emver-rs.git", features = [
"serde",
] }
ipnet = "2.8.0"
openssl = { version = "0.10.57", features = ["vendored"] }
patch-db = { version = "*", path = "../../patch-db/patch-db", features = [
"trace",
] }
rand = "0.8.5"
regex = "1.10.2"
reqwest = "0.11.22"
rpc-toolkit = "0.2.2"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
sqlx = { version = "0.7.2", features = [
"chrono",
"runtime-tokio-rustls",
"postgres",
] }
ssh-key = "0.6.2"
thiserror = "1.0"
tokio = { version = "1", features = ["full"] }
torut = "0.2.1"
tracing = "0.1.39"
yasi = "0.1.5"

View File

@@ -1,171 +0,0 @@
use std::borrow::Cow;
use std::path::Path;
use base64::Engine;
use color_eyre::eyre::eyre;
use reqwest::header::CONTENT_TYPE;
use serde::{Deserialize, Serialize};
use tokio::io::{AsyncRead, AsyncReadExt};
use yasi::InternedString;
use crate::{mime, Error, ErrorKind, ResultExt};
/// An RFC 2397 `data:` URL: a MIME type plus raw (not yet base64-encoded)
/// payload bytes, either borrowed or owned.
#[derive(Clone)]
pub struct DataUrl<'a> {
    mime: InternedString,
    data: Cow<'a, [u8]>,
}
impl<'a> DataUrl<'a> {
    /// Fallback MIME type when none is known.
    pub const DEFAULT_MIME: &'static str = "application/octet-stream";
    /// Hard cap on payload size (100 KiB) for URLs built from readers.
    pub const MAX_SIZE: u64 = 100 * 1024;
    /// Render as `data:{mime};base64,{data}`. The buffer is pre-sized via
    /// the `data_url_len` estimate so encoding does not reallocate (checked
    /// by the `doesnt_reallocate` test).
    pub fn to_string(&self) -> String {
        use std::fmt::Write;
        let mut res = String::with_capacity(self.data_url_len_without_mime() + self.mime.len());
        let _ = write!(res, "data:{};base64,", self.mime);
        base64::engine::general_purpose::STANDARD.encode_string(&self.data, &mut res);
        res
    }
    // Length bound excluding the MIME type: "data:" (5) + ";base64," (8) +
    // base64 payload (~4/3 of the raw length, +3 slack for padding rounding).
    fn data_url_len_without_mime(&self) -> usize {
        5 + 8 + (4 * self.data.len() / 3) + 3
    }
    /// Length bound for the full rendered URL.
    pub fn data_url_len(&self) -> usize {
        self.data_url_len_without_mime() + self.mime.len()
    }
    /// Borrow `data` as the payload with the given MIME type (no copy).
    pub fn from_slice(mime: &str, data: &'a [u8]) -> Self {
        Self {
            mime: InternedString::intern(mime),
            data: Cow::Borrowed(data),
        }
    }
}
impl DataUrl<'static> {
    /// Read up to `MAX_SIZE` bytes from `rdr` into an owned payload.
    ///
    /// `size_est` (when provided) is validated up front so oversized inputs
    /// fail before any read; the reader is also capped at `MAX_SIZE + 1`
    /// bytes and the final length re-checked.
    pub async fn from_reader(
        mime: &str,
        rdr: impl AsyncRead + Unpin,
        size_est: Option<u64>,
    ) -> Result<Self, Error> {
        let check_size = |s| {
            if s > Self::MAX_SIZE {
                Err(Error::new(
                    eyre!("Data URLs must be smaller than 100KiB"),
                    ErrorKind::Filesystem,
                ))
            } else {
                Ok(s)
            }
        };
        let mut buf = size_est
            .map(check_size)
            .transpose()?
            .map(|s| Vec::with_capacity(s as usize))
            .unwrap_or_default();
        rdr.take(Self::MAX_SIZE + 1).read_to_end(&mut buf).await?;
        check_size(buf.len() as u64)?;
        Ok(Self {
            mime: InternedString::intern(mime),
            data: Cow::Owned(buf),
        })
    }
    /// Build from a file, inferring MIME from the extension (fallback:
    /// `DEFAULT_MIME`) and using the file length as the size estimate.
    pub async fn from_path(path: impl AsRef<Path>) -> Result<Self, Error> {
        let path = path.as_ref();
        let f = tokio::fs::File::open(path).await?;
        let m = f.metadata().await?;
        let mime = path
            .extension()
            .and_then(|s| s.to_str())
            .and_then(mime)
            .unwrap_or(Self::DEFAULT_MIME);
        Self::from_reader(mime, f, Some(m.len())).await
    }
    /// Build from an HTTP response, taking MIME from the Content-Type header.
    ///
    /// NOTE(review): the body is buffered without the `MAX_SIZE` check that
    /// `from_reader` applies — confirm whether that limit should apply here.
    pub async fn from_response(res: reqwest::Response) -> Result<Self, Error> {
        let mime = InternedString::intern(
            res.headers()
                .get(CONTENT_TYPE)
                .and_then(|h| h.to_str().ok())
                .unwrap_or(Self::DEFAULT_MIME),
        );
        let data = res.bytes().await.with_kind(ErrorKind::Network)?.to_vec();
        Ok(Self {
            mime,
            data: Cow::Owned(data),
        })
    }
    /// Take ownership of `data` as the payload with the given MIME type.
    pub fn from_vec(mime: &str, data: Vec<u8>) -> Self {
        Self {
            mime: InternedString::intern(mime),
            data: Cow::Owned(data),
        }
    }
}
impl<'a> std::fmt::Debug for DataUrl<'a> {
    // Debug output is the full rendered data URL string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.to_string())
    }
}
impl<'de> Deserialize<'de> for DataUrl<'static> {
    /// Parse from a `data:<mime>;base64,<payload>` string.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct Visitor;
        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = DataUrl<'static>;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                write!(formatter, "a valid base64 data url")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // Split "data:{mime};base64,{payload}" and decode the payload;
                // any structural or base64 failure maps to invalid_value.
                v.strip_prefix("data:")
                    .and_then(|v| v.split_once(";base64,"))
                    .and_then(|(mime, data)| {
                        Some(DataUrl {
                            mime: InternedString::intern(mime),
                            data: Cow::Owned(
                                base64::engine::general_purpose::STANDARD
                                    .decode(data)
                                    .ok()?,
                            ),
                        })
                    })
                    .ok_or_else(|| {
                        E::invalid_value(serde::de::Unexpected::Str(v), &"a valid base64 data url")
                    })
            }
        }
        deserializer.deserialize_any(Visitor)
    }
}
impl<'a> Serialize for DataUrl<'a> {
    /// Serialize as the canonical `data:<mime>;base64,<payload>` string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let url = self.to_string();
        serializer.serialize_str(&url)
    }
}
// `to_string` must pre-size exactly: for payloads of 0..10 bytes, the rendered
// string's capacity equals `data_url_len` (i.e. no reallocation occurred).
#[test]
fn doesnt_reallocate() {
    let random: [u8; 10] = rand::random();
    for i in 0..10 {
        let icon = DataUrl {
            mime: InternedString::intern("png"),
            data: Cow::Borrowed(&random[..i]),
        };
        assert_eq!(dbg!(icon.to_string()).capacity(), icon.data_url_len());
    }
}

View File

@@ -1,434 +0,0 @@
use std::fmt::Display;
use color_eyre::eyre::eyre;
use patch_db::Revision;
use rpc_toolkit::hyper::http::uri::InvalidUri;
use rpc_toolkit::reqwest;
use rpc_toolkit::yajrc::RpcError;
use crate::InvalidId;
/// Numeric error categories surfaced over RPC. Discriminants are explicit and
/// gaps are preserved for retired variants (see `Nginx = 16`), so do not
/// renumber existing entries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ErrorKind {
    Unknown = 1,
    Filesystem = 2,
    Docker = 3,
    ConfigSpecViolation = 4,
    ConfigRulesViolation = 5,
    NotFound = 6,
    IncorrectPassword = 7,
    VersionIncompatible = 8,
    Network = 9,
    Registry = 10,
    Serialization = 11,
    Deserialization = 12,
    Utf8 = 13,
    ParseVersion = 14,
    IncorrectDisk = 15,
    // Nginx = 16, (retired; discriminant reserved)
    Dependency = 17,
    ParseS9pk = 18,
    ParseUrl = 19,
    DiskNotAvailable = 20,
    BlockDevice = 21,
    InvalidOnionAddress = 22,
    Pack = 23,
    ValidateS9pk = 24,
    DiskCorrupted = 25, // Remove
    Tor = 26,
    ConfigGen = 27,
    ParseNumber = 28,
    Database = 29,
    InvalidPackageId = 30,
    InvalidSignature = 31,
    Backup = 32,
    Restore = 33,
    Authorization = 34,
    AutoConfigure = 35,
    Action = 36,
    RateLimited = 37,
    InvalidRequest = 38,
    MigrationFailed = 39,
    Uninitialized = 40,
    ParseNetAddress = 41,
    ParseSshKey = 42,
    SoundError = 43,
    ParseTimestamp = 44,
    ParseSysInfo = 45,
    Wifi = 46,
    Journald = 47,
    DiskManagement = 48,
    OpenSsl = 49,
    PasswordHashGeneration = 50,
    DiagnosticMode = 51,
    ParseDbField = 52,
    Duplicate = 53,
    MultipleErrors = 54,
    Incoherent = 55,
    InvalidBackupTargetId = 56,
    ProductKeyMismatch = 57,
    LanPortConflict = 58,
    Javascript = 59,
    Pem = 60,
    TLSInit = 61,
    Ascii = 62,
    MissingHeader = 63,
    Grub = 64,
    Systemd = 65,
    OpenSsh = 66,
    Zram = 67,
    Lshw = 68,
    CpuSettings = 69,
    Firmware = 70,
    Timeout = 71,
}
impl ErrorKind {
    /// Human-readable label for this error category (used by `Display` and
    /// ultimately shown to users).
    pub fn as_str(&self) -> &'static str {
        use ErrorKind::*;
        match self {
            Unknown => "Unknown Error",
            Filesystem => "Filesystem I/O Error",
            Docker => "Docker Error",
            ConfigSpecViolation => "Config Spec Violation",
            ConfigRulesViolation => "Config Rules Violation",
            NotFound => "Not Found",
            IncorrectPassword => "Incorrect Password",
            VersionIncompatible => "Version Incompatible",
            Network => "Network Error",
            Registry => "Registry Error",
            Serialization => "Serialization Error",
            Deserialization => "Deserialization Error",
            Utf8 => "UTF-8 Parse Error",
            ParseVersion => "Version Parsing Error",
            IncorrectDisk => "Incorrect Disk",
            // Nginx => "Nginx Error",
            Dependency => "Dependency Error",
            ParseS9pk => "S9PK Parsing Error",
            ParseUrl => "URL Parsing Error",
            DiskNotAvailable => "Disk Not Available",
            BlockDevice => "Block Device Error",
            InvalidOnionAddress => "Invalid Onion Address",
            Pack => "Pack Error",
            ValidateS9pk => "S9PK Validation Error",
            DiskCorrupted => "Disk Corrupted", // Remove
            Tor => "Tor Daemon Error",
            ConfigGen => "Config Generation Error",
            ParseNumber => "Number Parsing Error",
            Database => "Database Error",
            InvalidPackageId => "Invalid Package ID",
            InvalidSignature => "Invalid Signature",
            Backup => "Backup Error",
            Restore => "Restore Error",
            Authorization => "Unauthorized",
            AutoConfigure => "Auto-Configure Error",
            Action => "Action Failed",
            RateLimited => "Rate Limited",
            InvalidRequest => "Invalid Request",
            MigrationFailed => "Migration Failed",
            Uninitialized => "Uninitialized",
            ParseNetAddress => "Net Address Parsing Error",
            ParseSshKey => "SSH Key Parsing Error",
            SoundError => "Sound Interface Error",
            ParseTimestamp => "Timestamp Parsing Error",
            ParseSysInfo => "System Info Parsing Error",
            Wifi => "WiFi Internal Error",
            Journald => "Journald Error",
            DiskManagement => "Disk Management Error",
            OpenSsl => "OpenSSL Internal Error",
            PasswordHashGeneration => "Password Hash Generation Error",
            DiagnosticMode => "Server is in Diagnostic Mode",
            ParseDbField => "Database Field Parse Error",
            Duplicate => "Duplication Error",
            MultipleErrors => "Multiple Errors",
            Incoherent => "Incoherent",
            InvalidBackupTargetId => "Invalid Backup Target ID",
            ProductKeyMismatch => "Incompatible Product Keys",
            LanPortConflict => "Incompatible LAN Port Configuration",
            Javascript => "Javascript Engine Error",
            Pem => "PEM Encoding Error",
            TLSInit => "TLS Backend Initialization Error",
            Ascii => "ASCII Parse Error",
            MissingHeader => "Missing Header",
            Grub => "Grub Error",
            Systemd => "Systemd Error",
            OpenSsh => "OpenSSH Error",
            Zram => "Zram Error",
            Lshw => "LSHW Error",
            CpuSettings => "CPU Settings Error",
            Firmware => "Firmware Error",
            Timeout => "Timeout Error",
        }
    }
}
impl Display for ErrorKind {
    /// Writes the same label as [`ErrorKind::as_str`].
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
/// Application error: an eyre error chain tagged with an [`ErrorKind`]
/// category and an optional patch-db `Revision`.
#[derive(Debug)]
pub struct Error {
    pub source: color_eyre::eyre::Error,
    pub kind: ErrorKind,
    pub revision: Option<Revision>,
}
impl Display for Error {
    /// Formats as `"<kind label>: <source>"`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // ErrorKind's Display is its as_str() label, so this output is
        // identical to formatting with `self.kind.as_str()`.
        write!(f, "{}: {}", self.kind, self.source)
    }
}
impl Error {
    /// Wraps any error-like value in an [`Error`] with the given kind and no
    /// revision attached.
    pub fn new<E: Into<color_eyre::eyre::Error>>(source: E, kind: ErrorKind) -> Self {
        let source = source.into();
        Self {
            source,
            kind,
            revision: None,
        }
    }
}
// Blanket conversions from common library errors into `Error`, each tagged
// with the most specific `ErrorKind` available so `?` works throughout the
// codebase.
impl From<InvalidId> for Error {
    fn from(err: InvalidId) -> Self {
        Error::new(err, ErrorKind::InvalidPackageId)
    }
}
impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Error::new(e, ErrorKind::Filesystem)
    }
}
impl From<std::str::Utf8Error> for Error {
    fn from(e: std::str::Utf8Error) -> Self {
        Error::new(e, ErrorKind::Utf8)
    }
}
impl From<std::string::FromUtf8Error> for Error {
    fn from(e: std::string::FromUtf8Error) -> Self {
        Error::new(e, ErrorKind::Utf8)
    }
}
impl From<emver::ParseError> for Error {
    fn from(e: emver::ParseError) -> Self {
        Error::new(e, ErrorKind::ParseVersion)
    }
}
impl From<rpc_toolkit::url::ParseError> for Error {
    fn from(e: rpc_toolkit::url::ParseError) -> Self {
        Error::new(e, ErrorKind::ParseUrl)
    }
}
// Int and float parse failures share the `ParseNumber` kind.
impl From<std::num::ParseIntError> for Error {
    fn from(e: std::num::ParseIntError) -> Self {
        Error::new(e, ErrorKind::ParseNumber)
    }
}
impl From<std::num::ParseFloatError> for Error {
    fn from(e: std::num::ParseFloatError) -> Self {
        Error::new(e, ErrorKind::ParseNumber)
    }
}
// Conversions from third-party crate errors. Errors that do not satisfy the
// `Into<eyre::Error>` bounds directly (openssl's ErrorStack, hyper's
// InvalidUri) are stringified via `eyre!` instead.
impl From<patch_db::Error> for Error {
    fn from(e: patch_db::Error) -> Self {
        Error::new(e, ErrorKind::Database)
    }
}
impl From<sqlx::Error> for Error {
    fn from(e: sqlx::Error) -> Self {
        Error::new(e, ErrorKind::Database)
    }
}
impl From<ed25519_dalek::SignatureError> for Error {
    fn from(e: ed25519_dalek::SignatureError) -> Self {
        Error::new(e, ErrorKind::InvalidSignature)
    }
}
impl From<std::net::AddrParseError> for Error {
    fn from(e: std::net::AddrParseError) -> Self {
        Error::new(e, ErrorKind::ParseNetAddress)
    }
}
impl From<torut::control::ConnError> for Error {
    fn from(e: torut::control::ConnError) -> Self {
        Error::new(e, ErrorKind::Tor)
    }
}
impl From<ipnet::AddrParseError> for Error {
    fn from(e: ipnet::AddrParseError) -> Self {
        Error::new(e, ErrorKind::ParseNetAddress)
    }
}
impl From<openssl::error::ErrorStack> for Error {
    fn from(e: openssl::error::ErrorStack) -> Self {
        Error::new(eyre!("{}", e), ErrorKind::OpenSsl)
    }
}
impl From<mbrman::Error> for Error {
    fn from(e: mbrman::Error) -> Self {
        Error::new(e, ErrorKind::DiskManagement)
    }
}
impl From<InvalidUri> for Error {
    fn from(e: InvalidUri) -> Self {
        Error::new(eyre!("{}", e), ErrorKind::ParseUrl)
    }
}
impl From<ssh_key::Error> for Error {
    fn from(e: ssh_key::Error) -> Self {
        Error::new(e, ErrorKind::OpenSsh)
    }
}
impl From<reqwest::Error> for Error {
    fn from(e: reqwest::Error) -> Self {
        // Classify by failure stage: request construction (bad URL etc.),
        // response-body decoding, and everything else as a network failure.
        let kind = if e.is_builder() {
            ErrorKind::ParseUrl
        } else if e.is_decode() {
            ErrorKind::Deserialization
        } else {
            ErrorKind::Network
        };
        Error::new(e, kind)
    }
}
// `patch_db` distinguishes serialization from deserialization failures;
// preserve that distinction in our `ErrorKind`.
impl From<patch_db::value::Error> for Error {
    fn from(value: patch_db::value::Error) -> Self {
        match value.kind {
            patch_db::value::ErrorKind::Serialization => {
                Error::new(value.source, ErrorKind::Serialization)
            }
            patch_db::value::ErrorKind::Deserialization => {
                Error::new(value.source, ErrorKind::Deserialization)
            }
        }
    }
}
impl From<Error> for RpcError {
fn from(e: Error) -> Self {
let mut data_object = serde_json::Map::with_capacity(3);
data_object.insert("details".to_owned(), format!("{}", e.source).into());
data_object.insert("debug".to_owned(), format!("{:?}", e.source).into());
data_object.insert(
"revision".to_owned(),
match serde_json::to_value(&e.revision) {
Ok(a) => a,
Err(e) => {
tracing::warn!("Error serializing revision for Error object: {}", e);
serde_json::Value::Null
}
},
);
RpcError {
code: e.kind as i32,
message: e.kind.as_str().into(),
data: Some(data_object.into()),
}
}
}
/// Accumulates multiple errors so a batch operation can continue past
/// individual failures and report them all at once.
#[derive(Debug, Default)]
pub struct ErrorCollection(Vec<Error>);
impl ErrorCollection {
    /// Creates an empty collection; identical to `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }
    /// Stashes the error from `result` (if any) and hands back the success
    /// value, so iteration can continue.
    pub fn handle<T, E: Into<Error>>(&mut self, result: Result<T, E>) -> Option<T> {
        result.map_err(|error| self.0.push(error.into())).ok()
    }
    /// `Ok(())` when nothing was collected; otherwise a single
    /// `MultipleErrors` wrapping the formatted list of everything recorded.
    pub fn into_result(self) -> Result<(), Error> {
        if self.0.is_empty() {
            return Ok(());
        }
        Err(Error::new(eyre!("{}", self), ErrorKind::MultipleErrors))
    }
}
/// Convenience conversion so an `ErrorCollection` can be returned directly
/// from fallible functions via `.into()`.
impl From<ErrorCollection> for Result<(), Error> {
    fn from(e: ErrorCollection) -> Self {
        e.into_result()
    }
}
/// Drains an iterator of results into the collection, keeping only the
/// errors (success values are discarded).
impl<T, E: Into<Error>> Extend<Result<T, E>> for ErrorCollection {
    fn extend<I: IntoIterator<Item = Result<T, E>>>(&mut self, iter: I) {
        iter.into_iter().for_each(|result| {
            self.handle(result);
        });
    }
}
/// Renders all collected errors as one `"; "`-separated line.
impl std::fmt::Display for ErrorCollection {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut first = true;
        for error in &self.0 {
            if !first {
                write!(f, "; ")?;
            }
            first = false;
            write!(f, "{}", error)?;
        }
        Ok(())
    }
}
/// Extension methods for tagging arbitrary `Result`s with an [`ErrorKind`]
/// (and optionally extra display context).
pub trait ResultExt<T, E>
where
    Self: Sized,
{
    /// Converts the error side into [`Error`] with the given kind.
    fn with_kind(self, kind: ErrorKind) -> Result<T, Error>;
    /// Like `with_kind`, but the closure inspects the error to choose both a
    /// kind and a human-readable context message.
    fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display + Send + Sync + 'static>(
        self,
        f: F,
    ) -> Result<T, Error>;
}
impl<T, E> ResultExt<T, E> for Result<T, E>
where
color_eyre::eyre::Error: From<E>,
{
fn with_kind(self, kind: ErrorKind) -> Result<T, Error> {
self.map_err(|e| Error {
source: e.into(),
kind,
revision: None,
})
}
fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display + Send + Sync + 'static>(
self,
f: F,
) -> Result<T, Error> {
self.map_err(|e| {
let (kind, ctx) = f(&e);
let source = color_eyre::eyre::Error::from(e);
let ctx = format!("{}: {}", ctx, source);
let source = source.wrap_err(ctx);
Error {
kind,
source,
revision: None,
}
})
}
}
/// Extension for converting `Option::None` into a `NotFound` error.
pub trait OptionExt<T>
where
    Self: Sized,
{
    /// Unwraps the option, or fails with `ErrorKind::NotFound` carrying
    /// `message` as the error text.
    fn or_not_found(self, message: impl std::fmt::Display) -> Result<T, Error>;
}
impl<T> OptionExt<T> for Option<T> {
fn or_not_found(self, message: impl std::fmt::Display) -> Result<T, Error> {
self.ok_or_else(|| Error::new(eyre!("{}", message), ErrorKind::NotFound))
}
}
/// Early-returns an `Error` of kind `$c` with a formatted message when the
/// condition `$x` is false — an `ensure!`/`assert!` analogue for `Error`.
#[macro_export]
macro_rules! ensure_code {
    ($x:expr, $c:expr, $fmt:expr $(, $arg:expr)*) => {
        if !($x) {
            return Err(Error::new(color_eyre::eyre::eyre!($fmt, $($arg, )*), $c));
        }
    };
}

View File

@@ -1,43 +0,0 @@
use std::path::Path;
use std::str::FromStr;
use serde::{Deserialize, Serialize};
use crate::{Id, InvalidId};
/// Identifier of a package action; a validated [`Id`] newtype.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct ActionId(Id);
impl FromStr for ActionId {
    type Err = InvalidId;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Format validation happens inside `Id::try_from`.
        Ok(ActionId(Id::try_from(s.to_owned())?))
    }
}
impl AsRef<ActionId> for ActionId {
    fn as_ref(&self) -> &ActionId {
        self
    }
}
impl std::fmt::Display for ActionId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &self.0)
    }
}
impl AsRef<str> for ActionId {
    fn as_ref(&self) -> &str {
        self.0.as_ref()
    }
}
// Lets an `ActionId` be used directly as a path segment.
impl AsRef<Path> for ActionId {
    fn as_ref(&self) -> &Path {
        self.0.as_ref().as_ref()
    }
}
// Hand-written so deserialization routes through `Id`'s validating
// deserializer rather than accepting arbitrary strings.
impl<'de> Deserialize<'de> for ActionId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        Ok(ActionId(serde::Deserialize::deserialize(deserializer)?))
    }
}

View File

@@ -1,59 +0,0 @@
use std::path::Path;
use serde::{Deserialize, Deserializer, Serialize};
use crate::Id;
/// Identifier of a service address; a validated [`Id`] newtype.
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct AddressId(Id);
impl From<Id> for AddressId {
    fn from(id: Id) -> Self {
        Self(id)
    }
}
impl std::fmt::Display for AddressId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &self.0)
    }
}
impl std::ops::Deref for AddressId {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}
impl AsRef<str> for AddressId {
    fn as_ref(&self) -> &str {
        self.0.as_ref()
    }
}
// Hand-written so deserialization routes through `Id`'s validating
// deserializer rather than accepting arbitrary strings.
impl<'de> Deserialize<'de> for AddressId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(AddressId(Deserialize::deserialize(deserializer)?))
    }
}
// Lets an `AddressId` be used directly as a path segment.
impl AsRef<Path> for AddressId {
    fn as_ref(&self) -> &Path {
        self.0.as_ref().as_ref()
    }
}
// Stored as TEXT in Postgres; delegates to the `&str` codec.
impl<'q> sqlx::Encode<'q, sqlx::Postgres> for AddressId {
    fn encode_by_ref(
        &self,
        buf: &mut <sqlx::Postgres as sqlx::database::HasArguments<'q>>::ArgumentBuffer,
    ) -> sqlx::encode::IsNull {
        <&str as sqlx::Encode<'q, sqlx::Postgres>>::encode_by_ref(&&**self, buf)
    }
}
impl sqlx::Type<sqlx::Postgres> for AddressId {
    fn type_info() -> sqlx::postgres::PgTypeInfo {
        <&str as sqlx::Type<sqlx::Postgres>>::type_info()
    }
    fn compatible(ty: &sqlx::postgres::PgTypeInfo) -> bool {
        <&str as sqlx::Type<sqlx::Postgres>>::compatible(ty)
    }
}

View File

@@ -1,31 +0,0 @@
use std::path::Path;
use serde::{Deserialize, Deserializer, Serialize};
use crate::Id;
/// Identifier of a package health check; a validated [`Id`] newtype.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct HealthCheckId(Id);
impl std::fmt::Display for HealthCheckId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &self.0)
    }
}
impl AsRef<str> for HealthCheckId {
    fn as_ref(&self) -> &str {
        self.0.as_ref()
    }
}
// Hand-written so deserialization routes through `Id`'s validating
// deserializer rather than accepting arbitrary strings.
impl<'de> Deserialize<'de> for HealthCheckId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(HealthCheckId(Deserialize::deserialize(deserializer)?))
    }
}
// Lets a `HealthCheckId` be used directly as a path segment.
impl AsRef<Path> for HealthCheckId {
    fn as_ref(&self) -> &Path {
        self.0.as_ref().as_ref()
    }
}

View File

@@ -1,38 +0,0 @@
use std::fmt::Debug;
use std::str::FromStr;
use serde::{Deserialize, Deserializer, Serialize};
use crate::{Id, InvalidId, PackageId, Version};
/// Identifier of a docker image declared by a package; a validated [`Id`]
/// newtype.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct ImageId(Id);
impl std::fmt::Display for ImageId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &self.0)
    }
}
impl ImageId {
    /// Full docker image reference for this image within a package:
    /// `start9/<package>/<image>:<version>`, using the tag `latest` when no
    /// version is supplied.
    pub fn for_package(&self, pkg_id: &PackageId, pkg_version: Option<&Version>) -> String {
        let tag = match pkg_version {
            Some(version) => version.as_str(),
            None => "latest",
        };
        format!("start9/{}/{}:{}", pkg_id, self.0, tag)
    }
}
impl FromStr for ImageId {
    type Err = InvalidId;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Format validation happens inside `Id::try_from`.
        Ok(ImageId(Id::try_from(s.to_owned())?))
    }
}
// Hand-written so deserialization routes through `Id`'s validating
// deserializer rather than accepting arbitrary strings.
impl<'de> Deserialize<'de> for ImageId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(ImageId(Deserialize::deserialize(deserializer)?))
    }
}

View File

@@ -1,59 +0,0 @@
use std::path::Path;
use serde::{Deserialize, Deserializer, Serialize};
use crate::Id;
/// Identifier of a network interface exposed by a package; a validated
/// [`Id`] newtype.
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub struct InterfaceId(Id);
impl From<Id> for InterfaceId {
    fn from(id: Id) -> Self {
        Self(id)
    }
}
impl std::fmt::Display for InterfaceId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &self.0)
    }
}
impl std::ops::Deref for InterfaceId {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}
impl AsRef<str> for InterfaceId {
    fn as_ref(&self) -> &str {
        self.0.as_ref()
    }
}
// Hand-written so deserialization routes through `Id`'s validating
// deserializer rather than accepting arbitrary strings.
impl<'de> Deserialize<'de> for InterfaceId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(InterfaceId(Deserialize::deserialize(deserializer)?))
    }
}
// Lets an `InterfaceId` be used directly as a path segment.
impl AsRef<Path> for InterfaceId {
    fn as_ref(&self) -> &Path {
        self.0.as_ref().as_ref()
    }
}
// Stored as TEXT in Postgres; delegates to the `&str` codec.
impl<'q> sqlx::Encode<'q, sqlx::Postgres> for InterfaceId {
    fn encode_by_ref(
        &self,
        buf: &mut <sqlx::Postgres as sqlx::database::HasArguments<'q>>::ArgumentBuffer,
    ) -> sqlx::encode::IsNull {
        <&str as sqlx::Encode<'q, sqlx::Postgres>>::encode_by_ref(&&**self, buf)
    }
}
impl sqlx::Type<sqlx::Postgres> for InterfaceId {
    fn type_info() -> sqlx::postgres::PgTypeInfo {
        <&str as sqlx::Type<sqlx::Postgres>>::type_info()
    }
    fn compatible(ty: &sqlx::postgres::PgTypeInfo) -> bool {
        <&str as sqlx::Type<sqlx::Postgres>>::compatible(ty)
    }
}

View File

@@ -1,3 +0,0 @@
/// Error returned when a string does not satisfy the `Id` format
/// (lowercase ASCII words separated by single hyphens; see `ID_REGEX`).
#[derive(Debug, thiserror::Error)]
#[error("Invalid ID")]
pub struct InvalidId;

View File

@@ -1,116 +0,0 @@
use std::borrow::Borrow;
use regex::Regex;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use yasi::InternedString;
mod action;
mod address;
mod health_check;
mod image;
mod interface;
mod invalid_id;
mod package;
mod volume;
pub use action::ActionId;
pub use address::AddressId;
pub use health_check::HealthCheckId;
pub use image::ImageId;
pub use interface::InterfaceId;
pub use invalid_id::InvalidId;
pub use package::{PackageId, SYSTEM_PACKAGE_ID};
pub use volume::VolumeId;
lazy_static::lazy_static! {
    /// Valid ids are lowercase ASCII words joined by single hyphens.
    static ref ID_REGEX: Regex = Regex::new("^[a-z]+(-[a-z]+)*$").unwrap();
    /// Reserved id for system-owned resources; its underscore means it can
    /// never collide with a user-supplied id (which must match `ID_REGEX`).
    pub static ref SYSTEM_ID: Id = Id(InternedString::intern("x_system"));
}
/// A validated, interned identifier — the base type for all the id newtypes
/// in this crate. Construct via `TryFrom`, which enforces `ID_REGEX`.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct Id(InternedString);
// Fallible constructors: every path into `Id` validates against `ID_REGEX`.
impl TryFrom<InternedString> for Id {
    type Error = InvalidId;
    fn try_from(value: InternedString) -> Result<Self, Self::Error> {
        if !ID_REGEX.is_match(&value) {
            return Err(InvalidId);
        }
        Ok(Id(value))
    }
}
impl TryFrom<String> for Id {
    type Error = InvalidId;
    fn try_from(value: String) -> Result<Self, Self::Error> {
        if !ID_REGEX.is_match(&value) {
            return Err(InvalidId);
        }
        Ok(Id(InternedString::intern(value)))
    }
}
impl TryFrom<&str> for Id {
    type Error = InvalidId;
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        if !ID_REGEX.is_match(value) {
            return Err(InvalidId);
        }
        Ok(Id(InternedString::intern(value)))
    }
}
impl std::ops::Deref for Id {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}
impl std::fmt::Display for Id {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &*self.0)
    }
}
impl AsRef<str> for Id {
    fn as_ref(&self) -> &str {
        &*self.0
    }
}
// Allows map/set lookups keyed by `&str`.
impl Borrow<str> for Id {
    fn borrow(&self) -> &str {
        self.0.as_ref()
    }
}
// Deserialization re-validates against `ID_REGEX` via `TryFrom`.
impl<'de> Deserialize<'de> for Id {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let unchecked: InternedString = Deserialize::deserialize(deserializer)?;
        Id::try_from(unchecked).map_err(serde::de::Error::custom)
    }
}
impl Serialize for Id {
    fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
    where
        Ser: Serializer,
    {
        serializer.serialize_str(&*self)
    }
}
// Stored as TEXT in Postgres; delegates to the `&str` codec.
impl<'q> sqlx::Encode<'q, sqlx::Postgres> for Id {
    fn encode_by_ref(
        &self,
        buf: &mut <sqlx::Postgres as sqlx::database::HasArguments<'q>>::ArgumentBuffer,
    ) -> sqlx::encode::IsNull {
        <&str as sqlx::Encode<'q, sqlx::Postgres>>::encode_by_ref(&&**self, buf)
    }
}
impl sqlx::Type<sqlx::Postgres> for Id {
    fn type_info() -> sqlx::postgres::PgTypeInfo {
        <&str as sqlx::Type<sqlx::Postgres>>::type_info()
    }
    fn compatible(ty: &sqlx::postgres::PgTypeInfo) -> bool {
        <&str as sqlx::Type<sqlx::Postgres>>::compatible(ty)
    }
}

View File

@@ -1,88 +0,0 @@
use std::borrow::Borrow;
use std::path::Path;
use std::str::FromStr;
use serde::{Deserialize, Serialize, Serializer};
use crate::{Id, InvalidId, SYSTEM_ID};
lazy_static::lazy_static! {
    /// The reserved package id for resources owned by the OS itself.
    pub static ref SYSTEM_PACKAGE_ID: PackageId = PackageId(SYSTEM_ID.clone());
}
/// Identifier of an installed package; a validated [`Id`] newtype.
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct PackageId(Id);
impl FromStr for PackageId {
    type Err = InvalidId;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Format validation happens inside `Id::try_from`.
        Ok(PackageId(Id::try_from(s.to_owned())?))
    }
}
impl From<Id> for PackageId {
    fn from(id: Id) -> Self {
        PackageId(id)
    }
}
impl std::ops::Deref for PackageId {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}
impl AsRef<PackageId> for PackageId {
    fn as_ref(&self) -> &PackageId {
        self
    }
}
impl std::fmt::Display for PackageId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &self.0)
    }
}
impl AsRef<str> for PackageId {
    fn as_ref(&self) -> &str {
        self.0.as_ref()
    }
}
// Allows map/set lookups keyed by `&str`.
impl Borrow<str> for PackageId {
    fn borrow(&self) -> &str {
        self.0.as_ref()
    }
}
// Lets a `PackageId` be used directly as a path segment.
impl AsRef<Path> for PackageId {
    fn as_ref(&self) -> &Path {
        self.0.as_ref().as_ref()
    }
}
// Hand-written so deserialization routes through `Id`'s validating
// deserializer rather than accepting arbitrary strings.
impl<'de> Deserialize<'de> for PackageId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        Ok(PackageId(Deserialize::deserialize(deserializer)?))
    }
}
impl Serialize for PackageId {
    fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
    where
        Ser: Serializer,
    {
        Serialize::serialize(&self.0, serializer)
    }
}
// Stored as TEXT in Postgres; delegates to the `&str` codec.
impl<'q> sqlx::Encode<'q, sqlx::Postgres> for PackageId {
    fn encode_by_ref(
        &self,
        buf: &mut <sqlx::Postgres as sqlx::database::HasArguments<'q>>::ArgumentBuffer,
    ) -> sqlx::encode::IsNull {
        <&str as sqlx::Encode<'q, sqlx::Postgres>>::encode_by_ref(&&**self, buf)
    }
}
impl sqlx::Type<sqlx::Postgres> for PackageId {
    fn type_info() -> sqlx::postgres::PgTypeInfo {
        <&str as sqlx::Type<sqlx::Postgres>>::type_info()
    }
    fn compatible(ty: &sqlx::postgres::PgTypeInfo) -> bool {
        <&str as sqlx::Type<sqlx::Postgres>>::compatible(ty)
    }
}

View File

@@ -1,58 +0,0 @@
use std::borrow::Borrow;
use std::path::Path;
use serde::{Deserialize, Deserializer, Serialize};
use crate::Id;
/// Identifier of a mounted volume: either the reserved `BACKUP` volume or a
/// package-defined custom volume.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum VolumeId {
    Backup,
    Custom(Id),
}
impl std::fmt::Display for VolumeId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            VolumeId::Backup => write!(f, "BACKUP"),
            VolumeId::Custom(id) => write!(f, "{}", id),
        }
    }
}
impl AsRef<str> for VolumeId {
    fn as_ref(&self) -> &str {
        match self {
            VolumeId::Backup => "BACKUP",
            VolumeId::Custom(id) => id.as_ref(),
        }
    }
}
// Allows map/set lookups keyed by `&str`.
impl Borrow<str> for VolumeId {
    fn borrow(&self) -> &str {
        self.as_ref()
    }
}
// Lets a `VolumeId` be used directly as a path segment.
impl AsRef<Path> for VolumeId {
    fn as_ref(&self) -> &Path {
        AsRef::<str>::as_ref(self).as_ref()
    }
}
// "BACKUP" is uppercase and therefore can never match `ID_REGEX`, so the
// two variants cannot collide during deserialization.
impl<'de> Deserialize<'de> for VolumeId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let unchecked: String = Deserialize::deserialize(deserializer)?;
        Ok(match unchecked.as_ref() {
            "BACKUP" => VolumeId::Backup,
            _ => VolumeId::Custom(Id::try_from(unchecked).map_err(serde::de::Error::custom)?),
        })
    }
}
impl Serialize for VolumeId {
    fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
    where
        Ser: serde::Serializer,
    {
        serializer.serialize_str(self.as_ref())
    }
}

View File

@@ -1,163 +0,0 @@
use std::{future::Future, pin::Pin, sync::Arc, time::Duration};
use color_eyre::eyre::bail;
use embassy_container_init::{Input, Output, ProcessId, RpcId};
use tokio::sync::{
mpsc::{UnboundedReceiver, UnboundedSender},
Mutex,
};
/// Used by the js-executor: starts a new command inside an already-running
/// exec session, yielding the `RpcId` of the spawned process.
pub type ExecCommand = Arc<
    dyn Fn(
            String,
            Vec<String>,
            UnboundedSender<embassy_container_init::Output>,
            Option<Duration>,
        ) -> Pin<Box<dyn Future<Output = Result<RpcId, String>> + 'static>>
        + Send
        + Sync
        + 'static,
>;
/// Used by the js-executor: delivers a unix signal number to the running
/// process identified by `RpcId`.
pub type SendKillSignal = Arc<
    dyn Fn(RpcId, u32) -> Pin<Box<dyn Future<Output = Result<(), String>> + 'static>>
        + Send
        + Sync
        + 'static,
>;
/// Bridge into the container's init process: spawn commands and signal them.
pub trait CommandInserter {
    /// Spawns `command` with `args`; output events stream over `sender`.
    /// Resolves to the process's `RpcId`, or `None` if it could not start.
    fn insert_command(
        &self,
        command: String,
        args: Vec<String>,
        sender: UnboundedSender<embassy_container_init::Output>,
        timeout: Option<Duration>,
    ) -> Pin<Box<dyn Future<Output = Option<RpcId>>>>;
    /// Sends unix signal number `command` to the process identified by `id`.
    fn send_signal(&self, id: RpcId, command: u32) -> Pin<Box<dyn Future<Output = ()>>>;
}
/// Shared, optional command inserter; `None` until the container registers one.
pub type ArcCommandInserter = Arc<Mutex<Option<Box<dyn CommandInserter>>>>;
/// Handle to a process spawned inside the container; dropping it spawns a
/// task that requests a kill (signal 9) for the process.
pub struct ExecutingCommand {
    /// Identifier of the spawned process within the container init's RPC.
    rpc_id: RpcId,
    /// Will exist until killed
    command_inserter: Arc<Mutex<Option<ArcCommandInserter>>>,
    /// Background futures owned by this command; cleared by `killed`.
    owned_futures: Arc<Mutex<Vec<Pin<Box<dyn Future<Output = ()>>>>>>,
}
impl ExecutingCommand {
pub async fn new(
command_inserter: ArcCommandInserter,
command: String,
args: Vec<String>,
timeout: Option<Duration>,
) -> Result<ExecutingCommand, color_eyre::Report> {
let (sender, receiver) = tokio::sync::mpsc::unbounded_channel::<Output>();
let rpc_id = {
let locked_command_inserter = command_inserter.lock().await;
let locked_command_inserter = match &*locked_command_inserter {
Some(a) => a,
None => bail!("Expecting containers.main in the package manifest".to_string()),
};
match locked_command_inserter
.insert_command(command, args, sender, timeout)
.await
{
Some(a) => a,
None => bail!("Couldn't get command started ".to_string()),
}
};
let executing_commands = ExecutingCommand {
rpc_id,
command_inserter: Arc::new(Mutex::new(Some(command_inserter.clone()))),
owned_futures: Default::default(),
};
// let waiting = self.wait()
Ok(executing_commands)
}
async fn wait(
rpc_id: RpcId,
mut outputs: UnboundedReceiver<Output>,
) -> Result<String, (Option<i32>, String)> {
let (process_id_send, process_id_recv) = tokio::sync::oneshot::channel::<ProcessId>();
let mut answer = String::new();
let mut command_error = String::new();
let mut status: Option<i32> = None;
let mut process_id_send = Some(process_id_send);
while let Some(output) = outputs.recv().await {
match output {
Output::ProcessId(process_id) => {
if let Some(process_id_send) = process_id_send.take() {
if let Err(err) = process_id_send.send(process_id) {
tracing::error!(
"Could not get a process id {process_id:?} sent for {rpc_id:?}"
);
tracing::debug!("{err:?}");
}
}
}
Output::Line(value) => {
answer.push_str(&value);
answer.push('\n');
}
Output::Error(error) => {
command_error.push_str(&error);
command_error.push('\n');
}
Output::Done(error_code) => {
status = error_code;
break;
}
}
}
if !command_error.is_empty() {
return Err((status, command_error));
}
Ok(answer)
}
async fn send_signal(&self, signal: u32) {
let locked = self.command_inserter.lock().await;
let inner = match &*locked {
Some(a) => a,
None => return,
};
let locked = inner.lock().await;
let command_inserter = match &*locked {
Some(a) => a,
None => return,
};
command_inserter.send_signal(self.rpc_id, signal);
}
/// Should only be called when output::done
async fn killed(&self) {
*self.owned_futures.lock().await = Default::default();
*self.command_inserter.lock().await = Default::default();
}
pub fn rpc_id(&self) -> RpcId {
self.rpc_id
}
}
impl Drop for ExecutingCommand {
    /// Best-effort cleanup: spawns a task asking the container to kill
    /// (signal 9) the process when this handle is dropped.
    fn drop(&mut self) {
        let command_inserter = self.command_inserter.clone();
        let rpc_id = self.rpc_id.clone();
        // NOTE(review): `send_kill_command` is not a method of the visible
        // `CommandInserter` trait (which exposes `send_signal`), and it is
        // invoked on the outer `ArcCommandInserter` without locking the inner
        // mutex — confirm this compiles against the intended trait/type.
        tokio::spawn(async move {
            let command_inserter_lock = command_inserter.lock().await;
            let command_inserter = match &*command_inserter_lock {
                Some(a) => a,
                None => {
                    return;
                }
            };
            command_inserter.send_kill_command(rpc_id, 9).await;
        });
    }
}

View File

@@ -1,13 +0,0 @@
// Sub-modules of the models crate; each is re-exported flat below so
// consumers can write `models::Foo` directly.
mod data_url;
mod errors;
mod id;
mod mime;
mod procedure_name;
mod version;
pub use data_url::*;
pub use errors::*;
pub use id::*;
pub use mime::*;
pub use procedure_name::*;
pub use version::*;

View File

@@ -1,47 +0,0 @@
/// Maps a file extension (without the leading dot) to its MIME type, or
/// `None` for extensions this crate does not recognize.
pub fn mime(extension: &str) -> Option<&'static str> {
    let mime_type = match extension {
        "apng" => "image/apng",
        "avif" => "image/avif",
        "flif" => "image/flif",
        "gif" => "image/gif",
        // All the common JPEG extension spellings share one MIME type.
        "jpg" | "jpeg" | "jfif" | "pjpeg" | "pjp" => "image/jpeg",
        "jxl" => "image/jxl",
        "png" => "image/png",
        "svg" => "image/svg+xml",
        "webp" => "image/webp",
        "mng" | "x-mng" => "image/x-mng",
        "css" => "text/css",
        "csv" => "text/csv",
        "html" => "text/html",
        "php" => "text/php",
        "plain" | "md" | "txt" => "text/plain",
        "xml" => "text/xml",
        "js" => "text/javascript",
        "wasm" => "application/wasm",
        _ => return None,
    };
    Some(mime_type)
}
/// Inverse of [`mime`]: maps a MIME type back to a canonical file extension
/// (without the leading dot), or `None` for unknown MIME types.
///
/// Fixed to match MIME types only: the previous version also matched bare
/// extensions ("jpg", "jfif", "mng", "plain", "md", ...) left over from
/// copying `mime`'s key set, and returned the non-canonical extensions
/// "pjp" and "x-mng" instead of "jpg" and "mng".
pub fn unmime(mime: &str) -> Option<&'static str> {
    match mime {
        "image/apng" => Some("apng"),
        "image/avif" => Some("avif"),
        "image/flif" => Some("flif"),
        "image/gif" => Some("gif"),
        "image/jpeg" => Some("jpg"),
        "image/jxl" => Some("jxl"),
        "image/png" => Some("png"),
        "image/svg+xml" => Some("svg"),
        "image/webp" => Some("webp"),
        "image/x-mng" => Some("mng"),
        "text/css" => Some("css"),
        "text/csv" => Some("csv"),
        "text/html" => Some("html"),
        "text/php" => Some("php"),
        "text/plain" => Some("txt"),
        "text/xml" => Some("xml"),
        "text/javascript" => Some("js"),
        "application/wasm" => Some("wasm"),
        _ => None,
    }
}

View File

@@ -1,57 +0,0 @@
use serde::{Deserialize, Serialize};
use crate::{ActionId, HealthCheckId, PackageId};
/// The procedure being invoked on a package, used to derive the docker
/// container name and/or the JS entrypoint path for dispatch.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProcedureName {
    Main, // Usually just run container
    CreateBackup,
    RestoreBackup,
    GetConfig,
    SetConfig,
    Migration,
    Properties,
    LongRunning,
    Check(PackageId),
    AutoConfig(PackageId),
    Health(HealthCheckId),
    Action(ActionId),
    Signal,
}
impl ProcedureName {
    /// Container/exec name used when dispatching this procedure via docker;
    /// `None` for procedures that are not docker-dispatched.
    pub fn docker_name(&self) -> Option<String> {
        let name = match self {
            ProcedureName::Main
            | ProcedureName::LongRunning
            | ProcedureName::Check(_)
            | ProcedureName::AutoConfig(_)
            | ProcedureName::Signal => return None,
            ProcedureName::CreateBackup => String::from("CreateBackup"),
            ProcedureName::RestoreBackup => String::from("RestoreBackup"),
            ProcedureName::GetConfig => String::from("GetConfig"),
            ProcedureName::SetConfig => String::from("SetConfig"),
            ProcedureName::Migration => String::from("Migration"),
            // Random suffix — presumably to keep concurrent property fetches
            // from colliding on the container name; TODO confirm.
            ProcedureName::Properties => format!("Properties-{}", rand::random::<u64>()),
            ProcedureName::Health(id) => format!("{}Health", id),
            ProcedureName::Action(id) => format!("{}Action", id),
        };
        Some(name)
    }
    /// Path of the JS entrypoint for this procedure; `None` only for
    /// `LongRunning`, which has no JS equivalent.
    pub fn js_function_name(&self) -> Option<String> {
        let name = match self {
            ProcedureName::LongRunning => return None,
            ProcedureName::Main => String::from("/main"),
            ProcedureName::CreateBackup => String::from("/createBackup"),
            ProcedureName::RestoreBackup => String::from("/restoreBackup"),
            ProcedureName::GetConfig => String::from("/getConfig"),
            ProcedureName::SetConfig => String::from("/setConfig"),
            ProcedureName::Migration => String::from("/migration"),
            ProcedureName::Properties => String::from("/properties"),
            ProcedureName::Health(id) => format!("/health/{}", id),
            ProcedureName::Action(id) => format!("/action/{}", id),
            ProcedureName::Check(id) => format!("/dependencies/{}/check", id),
            ProcedureName::AutoConfig(id) => format!("/dependencies/{}/autoConfigure", id),
            ProcedureName::Signal => String::from("/handleSignal"),
        };
        Some(name)
    }
}

View File

@@ -1,106 +0,0 @@
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::str::FromStr;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
/// An emver version paired with the exact string it was parsed from, so the
/// original formatting survives a deserialize/serialize round trip.
#[derive(Debug, Clone)]
pub struct Version {
    /// Parsed semantic version; used for all comparisons and hashing.
    version: emver::Version,
    /// Original textual form; used for display and serialization.
    string: String,
}
impl Version {
    /// The exact string this version was constructed from.
    pub fn as_str(&self) -> &str {
        &self.string
    }
    /// Consumes the wrapper, yielding the underlying semver value.
    pub fn into_version(self) -> emver::Version {
        self.version
    }
}
impl std::fmt::Display for Version {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Displays the original source string, not a re-rendering.
        write!(f, "{}", self.string)
    }
}
impl std::str::FromStr for Version {
    type Err = <emver::Version as FromStr>::Err;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Keeps the caller's exact string alongside the parsed value.
        Ok(Version {
            string: s.to_owned(),
            version: s.parse()?,
        })
    }
}
impl From<emver::Version> for Version {
    fn from(v: emver::Version) -> Self {
        Version {
            string: v.to_string(),
            version: v,
        }
    }
}
impl From<Version> for emver::Version {
    fn from(v: Version) -> Self {
        v.version
    }
}
impl Default for Version {
    fn default() -> Self {
        Self::from(emver::Version::default())
    }
}
// Deref/AsRef give transparent access to the underlying emver value.
impl Deref for Version {
    type Target = emver::Version;
    fn deref(&self) -> &Self::Target {
        &self.version
    }
}
impl AsRef<emver::Version> for Version {
    fn as_ref(&self) -> &emver::Version {
        &self.version
    }
}
impl AsRef<str> for Version {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
// Equality, ordering, and hashing are delegated to the parsed semver value
// so that differently-formatted but semantically equal versions compare
// equal; the cached string is deliberately excluded.
impl PartialEq for Version {
    fn eq(&self, other: &Version) -> bool {
        self.version.eq(&other.version)
    }
}
impl Eq for Version {}
impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        // Defer to `Ord` so the two orderings can never disagree
        // (clippy::non_canonical_partial_ord_impl).
        Some(self.cmp(other))
    }
}
impl Ord for Version {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.version.cmp(&other.version)
    }
}
impl Hash for Version {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.version.hash(state)
    }
}
// Deserializes from the version string (validated via `emver`) and
// serializes the original string back, preserving formatting round-trip.
impl<'de> Deserialize<'de> for Version {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let string = String::deserialize(deserializer)?;
        let version = emver::Version::from_str(&string).map_err(::serde::de::Error::custom)?;
        Ok(Self { string, version })
    }
}
impl Serialize for Version {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        self.string.serialize(serializer)
    }
}

View File

@@ -1,2 +0,0 @@
group_imports = "StdExternalCrate"
imports_granularity = "Module"

View File

@@ -1,11 +0,0 @@
[package]
name = "snapshot_creator"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
dashmap = "5.3.4"
deno_core = "=0.222.0"
deno_ast = { version = "=0.29.5", features = ["transpiling"] }

View File

@@ -1,11 +0,0 @@
use deno_core::JsRuntimeForSnapshot;
/// Builds a V8 snapshot for the JS runtime and writes it to
/// `JS_SNAPSHOT.bin` in the current working directory.
fn main() {
    let runtime = JsRuntimeForSnapshot::new(Default::default());
    let snapshot = runtime.snapshot();
    let snapshot_slice: &[u8] = &*snapshot;
    println!("Snapshot size: {}", snapshot_slice.len());
    // `expect` instead of bare `unwrap`: failing to write the snapshot should
    // say why (permissions, disk full, ...) since this runs in CI builds.
    std::fs::write("JS_SNAPSHOT.bin", snapshot_slice)
        .expect("failed to write JS_SNAPSHOT.bin");
}