wip: Starting down the bind for the effects

todo: complete an IP todo

chore: Fix the result type on something

todo: Address returning

chore: JS with callbacks

chore: Add in the chown and permissions

chore: Add in the binds and unbinds
This commit is contained in:
BluJ
2023-02-06 12:14:48 -07:00
committed by Aiden McClelland
parent 550b17552b
commit 9366dbb96e
10 changed files with 1237 additions and 460 deletions

View File

@@ -1,8 +1,11 @@
use helpers::{Callback, OsApi}; use color_eyre::{eyre::eyre, Report};
use models::PackageId; use helpers::{AddressSchemaLocal, AddressSchemaOnion, Callback, OsApi};
use models::{InterfaceId, PackageId};
use sqlx::Acquire;
use crate::manager::Manager; use crate::{manager::Manager, net::keys::Key};
use crate::Error;
use super::try_get_running_ip;
#[async_trait::async_trait] #[async_trait::async_trait]
impl OsApi for Manager { impl OsApi for Manager {
@@ -11,7 +14,103 @@ impl OsApi for Manager {
id: PackageId, id: PackageId,
path: &str, path: &str,
callback: Callback, callback: Callback,
) -> Result<serde_json::Value, Error> { ) -> Result<serde_json::Value, Report> {
todo!() todo!("BLUJ")
}
/// Bind `internal_port` of the running container to a LAN address for the
/// interface described by `address_schema`, returning that address.
///
/// The LAN registration and the key lookup happen inside a single
/// secret-store transaction, so a failure part-way leaves the store unchanged.
/// Fails if the container has no IP yet (i.e. is not running on the network).
async fn bind_local(
&self,
internal_port: u16,
address_schema: AddressSchemaLocal,
) -> Result<helpers::Address, Report> {
// Container must be up with an assigned IP before we can bind anything.
let ip = try_get_running_ip(&self.seed)
.await?
.ok_or_else(|| eyre!("No ip available"))?;
let AddressSchemaLocal { id, external_port } = address_schema;
let mut svc = self
.seed
.ctx
.net_controller
.create_service(self.seed.manifest.id.clone(), ip)
.await
.map_err(|e| eyre!("Could not get to net controller: {e:?}"))?;
// Register the binding and derive the address in one transaction.
let mut secrets = self.seed.ctx.secret_store.acquire().await?;
let mut tx = secrets.begin().await?;
svc.add_lan(&mut tx, id.clone(), external_port, internal_port, false)
.await
.map_err(|e| eyre!("Could not add to local: {e:?}"))?;
// Key for this (package, interface) pair yields the .local address string.
let key = Key::for_interface(&mut tx, Some((self.seed.manifest.id.clone(), id)))
.await
.map_err(|e| eyre!("Could not get network name: {e:?}"))?
.local_address();
tx.commit().await?;
Ok(helpers::Address(key))
}
/// Bind `internal_port` of the running container to a Tor onion address for
/// the interface described by `address_schema`, returning that address.
///
/// Mirrors `bind_local`: the Tor registration and the key lookup share one
/// secret-store transaction. Fails if the container has no IP yet.
async fn bind_onion(
&self,
internal_port: u16,
address_schema: AddressSchemaOnion,
) -> Result<helpers::Address, Report> {
let AddressSchemaOnion { id, external_port } = address_schema;
// Container must be up with an assigned IP before we can bind anything.
let ip = try_get_running_ip(&self.seed)
.await?
.ok_or_else(|| eyre!("No ip available"))?;
let mut svc = self
.seed
.ctx
.net_controller
.create_service(self.seed.manifest.id.clone(), ip)
.await
.map_err(|e| eyre!("Could not get to net controller: {e:?}"))?;
// Register the binding and derive the address in one transaction.
let mut secrets = self.seed.ctx.secret_store.acquire().await?;
let mut tx = secrets.begin().await?;
svc.add_tor(&mut tx, id.clone(), external_port, internal_port)
.await
.map_err(|e| eyre!("Could not add to tor: {e:?}"))?;
// Key for this (package, interface) pair yields the .onion address string.
let key = Key::for_interface(&mut tx, Some((self.seed.manifest.id.clone(), id)))
.await
.map_err(|e| eyre!("Could not get network name: {e:?}"))?
.tor_address()
.to_string();
tx.commit().await?;
Ok(helpers::Address(key))
}
/// Remove a previously created Tor (onion) binding for interface `id` on
/// external port `external`.
///
/// Requires the container to be running: its IP is needed to look up the
/// service on the net controller.
async fn unbind_onion(&self, id: InterfaceId, external: u16) -> Result<(), Report> {
    let ip = try_get_running_ip(&self.seed)
        .await?
        .ok_or_else(|| eyre!("No ip available"))?;
    let mut svc = self
        .seed
        .ctx
        .net_controller
        .create_service(self.seed.manifest.id.clone(), ip)
        .await
        .map_err(|e| eyre!("Could not get to net controller: {e:?}"))?;
    // Unlike bind_onion, no secret-store transaction is needed here; the
    // previous `secret_store.acquire()` was an unused copy-paste leftover
    // and has been removed.
    svc.remove_tor(id, external)
        .await
        .map_err(|e| eyre!("Could not remove from tor: {e:?}"))?;
    Ok(())
}
/// Remove a previously created LAN binding for interface `id` on external
/// port `external`.
///
/// Requires the container to be running: its IP is needed to look up the
/// service on the net controller.
async fn unbind_local(&self, id: InterfaceId, external: u16) -> Result<(), Report> {
    let ip = try_get_running_ip(&self.seed)
        .await?
        .ok_or_else(|| eyre!("No ip available"))?;
    let mut svc = self
        .seed
        .ctx
        .net_controller
        .create_service(self.seed.manifest.id.clone(), ip)
        .await
        .map_err(|e| eyre!("Could not get to net controller: {e:?}"))?;
    // Unlike bind_local, no secret-store transaction is needed here; the
    // previous `secret_store.acquire()` was an unused copy-paste leftover
    // and has been removed.
    svc.remove_lan(id, external)
        .await
        .map_err(|e| eyre!("Could not remove from local: {e:?}"))?;
    Ok(())
} }
} }

View File

@@ -251,7 +251,10 @@ async fn run_main_log_result(result: RunMainResult, seed: Arc<manager_seed::Mana
} }
} }
pub(super) async fn get_status(db: &mut PatchDbHandle, manifest: &Manifest) -> Result<MainStatus, Error> { pub(super) async fn get_status(
db: &mut PatchDbHandle,
manifest: &Manifest,
) -> Result<MainStatus, Error> {
Ok(crate::db::DatabaseModel::new() Ok(crate::db::DatabaseModel::new()
.package_data() .package_data()
.idx_model(&manifest.id) .idx_model(&manifest.id)
@@ -283,7 +286,6 @@ async fn set_status(
.status() .status()
.main() .main()
.put(db, main_status) .put(db, main_status)
.await? .await?;
.clone();
Ok(()) Ok(())
} }

View File

@@ -4,7 +4,7 @@ use std::sync::Arc;
use std::task::Poll; use std::task::Poll;
use std::time::Duration; use std::time::Duration;
use color_eyre::eyre::eyre; use color_eyre::{eyre::eyre, Report};
use embassy_container_init::ProcessGroupId; use embassy_container_init::ProcessGroupId;
use futures::future::BoxFuture; use futures::future::BoxFuture;
use futures::{FutureExt, TryFutureExt}; use futures::{FutureExt, TryFutureExt};
@@ -833,6 +833,18 @@ async fn main_health_check_daemon(seed: Arc<ManagerSeed>) {
type RuntimeOfCommand = NonDetachingJoinHandle<Result<Result<NoOutput, (i32, String)>, Error>>; type RuntimeOfCommand = NonDetachingJoinHandle<Result<Result<NoOutput, (i32, String)>, Error>>;
/// Look up the container's IPv4 address on the "start9" docker network.
///
/// Returns `Ok(None)` when the container has no address there (not running,
/// no network settings, or an empty address string); errors propagate from
/// the docker inspect call and from address parsing.
async fn try_get_running_ip(seed: &ManagerSeed) -> Result<Option<Ipv4Addr>, Report> {
    let inspection = container_inspect(seed).await?;
    let raw_ip = inspection
        .network_settings
        .and_then(|settings| settings.networks)
        .and_then(|mut networks| networks.remove("start9"))
        .and_then(|endpoint| endpoint.ip_address)
        .filter(|addr| !addr.is_empty());
    match raw_ip {
        Some(addr) => Ok(Some(addr.parse()?)),
        None => Ok(None),
    }
}
async fn get_running_ip(seed: &ManagerSeed, mut runtime: &mut RuntimeOfCommand) -> GetRunningIp { async fn get_running_ip(seed: &ManagerSeed, mut runtime: &mut RuntimeOfCommand) -> GetRunningIp {
loop { loop {
match container_inspect(seed).await { match container_inspect(seed).await {

View File

@@ -3,8 +3,9 @@ use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use color_eyre::Report;
use embassy_container_init::{ProcessGroupId, SignalGroup, SignalGroupParams}; use embassy_container_init::{ProcessGroupId, SignalGroup, SignalGroupParams};
use helpers::{OsApi, UnixRpcClient}; use helpers::{Address, AddressSchemaLocal, AddressSchemaOnion, Callback, OsApi, UnixRpcClient};
pub use js_engine::JsError; pub use js_engine::JsError;
use js_engine::{JsExecutionEnvironment, PathForVolumeId}; use js_engine::{JsExecutionEnvironment, PathForVolumeId};
use models::{ErrorKind, VolumeId}; use models::{ErrorKind, VolumeId};
@@ -49,7 +50,33 @@ struct SandboxOsApi {
_ctx: RpcContext, _ctx: RpcContext,
} }
#[async_trait::async_trait] #[async_trait::async_trait]
impl OsApi for SandboxOsApi {} impl OsApi for SandboxOsApi {
// Sandbox stand-ins for the OsApi surface. The JS sandbox has no access to
// the real OS, so these are intentionally unimplemented and will panic via
// `todo!()` if a sandboxed procedure ever calls them.
#[allow(unused_variables)]
async fn get_service_config(
&self,
id: PackageId,
path: &str,
callback: Callback,
) -> Result<serde_json::Value, Report> {
todo!()
}
#[allow(unused_variables)]
async fn bind_local(
&self,
internal_port: u16,
address_schema: AddressSchemaLocal,
) -> Result<Address, Report> {
todo!()
}
#[allow(unused_variables)]
async fn bind_onion(
&self,
internal_port: u16,
address_schema: AddressSchemaOnion,
) -> Result<Address, Report> {
todo!()
}
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)] #[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
@@ -181,190 +208,189 @@ fn unwrap_known_error<O: DeserializeOwned>(
} }
} }
#[tokio::test] #[cfg(test)]
async fn js_action_execute() { mod tests {
let js_action = JsProcedure { args: vec![] }; use super::*;
let path: PathBuf = "test/js_action_execute/" use helpers::{Address, AddressSchemaLocal, AddressSchemaOnion, Callback, OsApi};
.parse::<PathBuf>() use serde_json::{json, Value};
.unwrap() use tokio::sync::watch;
.canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::GetConfig;
let volumes: Volumes = serde_json::from_value(serde_json::json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap();
let input: Option<serde_json::Value> = Some(serde_json::json!({"test":123}));
let timeout = Some(Duration::from_secs(10));
let _output: crate::config::action::ConfigRes = js_action
.execute(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
None,
)
.await
.unwrap()
.unwrap();
assert_eq!(
&std::fs::read_to_string(
"test/js_action_execute/package-data/volumes/test-package/data/main/test.log"
)
.unwrap(),
"This is a test"
);
std::fs::remove_file(
"test/js_action_execute/package-data/volumes/test-package/data/main/test.log",
)
.unwrap();
}
#[tokio::test] struct OsApiMock {
async fn js_action_execute_error() { config_callbacks: watch::Sender<Vec<Callback>>,
let js_action = JsProcedure { args: vec![] }; }
let path: PathBuf = "test/js_action_execute/" impl Default for OsApiMock {
.parse::<PathBuf>() fn default() -> Self {
.unwrap() Self {
.canonicalize() config_callbacks: watch::channel(Vec::new()).0,
.unwrap(); }
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::SetConfig;
let volumes: Volumes = serde_json::from_value(serde_json::json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
} }
})) }
.unwrap();
let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10));
let output: Result<serde_json::Value, _> = js_action
.execute(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
None,
)
.await
.unwrap();
assert_eq!("Err((2, \"Not setup\"))", &format!("{:?}", output));
}
#[tokio::test] #[async_trait::async_trait]
async fn js_action_fetch() { impl OsApi for OsApiMock {
let js_action = JsProcedure { args: vec![] }; #[allow(unused_variables)]
let path: PathBuf = "test/js_action_execute/" async fn get_service_config(
.parse::<PathBuf>() &self,
.unwrap() id: PackageId,
.canonicalize() path: &str,
.unwrap(); callback: Callback,
let package_id = "test-package".parse().unwrap(); ) -> Result<serde_json::Value, Report> {
let package_version: Version = "0.3.0.3".parse().unwrap(); println!("Adding callback");
let name = ProcedureName::Action("fetch".parse().unwrap()); self.config_callbacks.send_modify(|x| x.push(callback));
let volumes: Volumes = serde_json::from_value(serde_json::json!({ Ok(Value::Null)
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
} }
})) #[allow(unused_variables)]
.unwrap(); async fn bind_local(
let input: Option<serde_json::Value> = None; &self,
let timeout = Some(Duration::from_secs(10)); internal_port: u16,
js_action address_schema: AddressSchemaLocal,
.execute::<serde_json::Value, serde_json::Value>( ) -> Result<Address, Report> {
&path, todo!()
&package_id, }
&package_version, #[allow(unused_variables)]
name, async fn bind_onion(
&volumes, &self,
input, internal_port: u16,
timeout, address_schema: AddressSchemaOnion,
ProcessGroupId(0), ) -> Result<Address, Report> {
None, todo!()
None, }
}
#[tokio::test]
async fn js_action_execute() {
let js_action = JsProcedure { args: vec![] };
let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>()
.unwrap()
.canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::GetConfig;
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap();
let input: Option<serde_json::Value> = Some(json!({"test":123}));
let timeout = Some(Duration::from_secs(10));
let _output: crate::config::action::ConfigRes = js_action
.execute(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
Arc::new(OsApiMock::default()),
)
.await
.unwrap()
.unwrap();
assert_eq!(
&std::fs::read_to_string(
"test/js_action_execute/package-data/volumes/test-package/data/main/test.log"
)
.unwrap(),
"This is a test"
);
std::fs::remove_file(
"test/js_action_execute/package-data/volumes/test-package/data/main/test.log",
) )
.await
.unwrap()
.unwrap(); .unwrap();
} }
#[tokio::test] #[tokio::test]
async fn js_test_slow() { async fn js_action_execute_error() {
let js_action = JsProcedure { args: vec![] }; let js_action = JsProcedure { args: vec![] };
let path: PathBuf = "test/js_action_execute/" let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>() .parse::<PathBuf>()
.unwrap() .unwrap()
.canonicalize() .canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::SetConfig;
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap(); .unwrap();
let package_id = "test-package".parse().unwrap(); let input: Option<serde_json::Value> = None;
let package_version: Version = "0.3.0.3".parse().unwrap(); let timeout = Some(Duration::from_secs(10));
let name = ProcedureName::Action("slow".parse().unwrap()); let output: Result<serde_json::Value, _> = js_action
let volumes: Volumes = serde_json::from_value(serde_json::json!({ .execute(
"main": { &path,
"type": "data" &package_id,
}, &package_version,
"compat": { name,
"type": "assets" &volumes,
}, input,
"filebrowser" :{ timeout,
"package-id": "filebrowser", ProcessGroupId(0),
"path": "data", None,
"readonly": true, Arc::new(OsApiMock::default()),
"type": "pointer", )
"volume-id": "main", .await
} .unwrap();
})) assert_eq!("Err((2, \"Not setup\"))", &format!("{:?}", output));
.unwrap(); }
let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10)); #[tokio::test]
tracing::debug!("testing start"); async fn js_action_fetch() {
tokio::select! { let js_action = JsProcedure { args: vec![] };
a = js_action let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>()
.unwrap()
.canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::Action("fetch".parse().unwrap());
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap();
let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10));
js_action
.execute::<serde_json::Value, serde_json::Value>( .execute::<serde_json::Value, serde_json::Value>(
&path, &path,
&package_id, &package_id,
@@ -375,117 +401,138 @@ async fn js_test_slow() {
timeout, timeout,
ProcessGroupId(0), ProcessGroupId(0),
None, None,
None, Arc::new(OsApiMock::default()),
) => { a.unwrap().unwrap(); }, )
_ = tokio::time::sleep(Duration::from_secs(1)) => () .await
.unwrap()
.unwrap();
} }
tracing::debug!("testing end should");
tokio::time::sleep(Duration::from_secs(2)).await;
tracing::debug!("Done");
}
#[tokio::test]
async fn js_action_var_arg() {
let js_action = JsProcedure {
args: vec![42.into()],
};
let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>()
.unwrap()
.canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::Action("js-action-var-arg".parse().unwrap());
let volumes: Volumes = serde_json::from_value(serde_json::json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap();
let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10));
js_action
.execute::<serde_json::Value, serde_json::Value>(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
None,
)
.await
.unwrap()
.unwrap();
}
#[tokio::test] #[tokio::test]
async fn js_action_test_rename() { async fn js_test_slow() {
let js_action = JsProcedure { args: vec![] }; let js_action = JsProcedure { args: vec![] };
let path: PathBuf = "test/js_action_execute/" let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>() .parse::<PathBuf>()
.unwrap() .unwrap()
.canonicalize() .canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::Action("slow".parse().unwrap());
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap(); .unwrap();
let package_id = "test-package".parse().unwrap(); let input: Option<serde_json::Value> = None;
let package_version: Version = "0.3.0.3".parse().unwrap(); let timeout = Some(Duration::from_secs(10));
let name = ProcedureName::Action("test-rename".parse().unwrap()); tracing::debug!("testing start");
let volumes: Volumes = serde_json::from_value(serde_json::json!({ tokio::select! {
"main": { a = js_action
"type": "data" .execute::<serde_json::Value, serde_json::Value>(
}, &path,
"compat": { &package_id,
"type": "assets" &package_version,
}, name,
"filebrowser" :{ &volumes,
"package-id": "filebrowser", input,
"path": "data", timeout,
"readonly": true, ProcessGroupId(0),
"type": "pointer", None,
"volume-id": "main", Arc::new(OsApiMock::default())
) => { a.unwrap().unwrap(); },
_ = tokio::time::sleep(Duration::from_secs(1)) => ()
} }
})) tracing::debug!("testing end should");
.unwrap(); tokio::time::sleep(Duration::from_secs(2)).await;
let input: Option<serde_json::Value> = None; tracing::debug!("Done");
let timeout = Some(Duration::from_secs(10)); }
js_action #[tokio::test]
.execute::<serde_json::Value, serde_json::Value>( async fn js_action_var_arg() {
&path, let js_action = JsProcedure {
&package_id, args: vec![42.into()],
&package_version, };
name, let path: PathBuf = "test/js_action_execute/"
&volumes, .parse::<PathBuf>()
input, .unwrap()
timeout, .canonicalize()
ProcessGroupId(0), .unwrap();
None, let package_id = "test-package".parse().unwrap();
None, let package_version: Version = "0.3.0.3".parse().unwrap();
) let name = ProcedureName::Action("js-action-var-arg".parse().unwrap());
.await let volumes: Volumes = serde_json::from_value(json!({
.unwrap() "main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap(); .unwrap();
} let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10));
js_action
.execute::<serde_json::Value, serde_json::Value>(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
Arc::new(OsApiMock::default()),
)
.await
.unwrap()
.unwrap();
}
#[tokio::test] #[tokio::test]
async fn js_action_test_deep_dir() { async fn js_action_test_rename() {
let js_action = JsProcedure { args: vec![] }; let js_action = JsProcedure { args: vec![] };
let path: PathBuf = "test/js_action_execute/" let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>() .parse::<PathBuf>()
.unwrap() .unwrap()
.canonicalize() .canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::Action("test-rename".parse().unwrap());
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap(); .unwrap();
let package_id = "test-package".parse().unwrap(); let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap(); let package_version: Version = "0.3.0.3".parse().unwrap();
@@ -663,51 +710,301 @@ async fn js_action_test_read_dir() {
.unwrap(); .unwrap();
} }
#[tokio::test] #[tokio::test]
async fn js_rsync() { async fn js_action_test_deep_dir() {
let js_action = JsProcedure { args: vec![] }; let js_action = JsProcedure { args: vec![] };
let path: PathBuf = "test/js_action_execute/" let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>() .parse::<PathBuf>()
.unwrap() .unwrap()
.canonicalize() .canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::Action("test-deep-dir".parse().unwrap());
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap(); .unwrap();
let package_id = "test-package".parse().unwrap(); let input: Option<serde_json::Value> = None;
let package_version: Version = "0.3.0.3".parse().unwrap(); let timeout = Some(Duration::from_secs(10));
let name = ProcedureName::Action("test-rsync".parse().unwrap()); js_action
let volumes: Volumes = serde_json::from_value(serde_json::json!({ .execute::<serde_json::Value, serde_json::Value>(
"main": { &path,
"type": "data" &package_id,
}, &package_version,
"compat": { name,
"type": "assets" &volumes,
}, input,
"filebrowser" :{ timeout,
"package-id": "filebrowser", ProcessGroupId(0),
"path": "data", None,
"readonly": true, Arc::new(OsApiMock::default()),
"type": "pointer", )
"volume-id": "main", .await
} .unwrap()
})) .unwrap();
.unwrap(); }
let input: Option<serde_json::Value> = None; #[tokio::test]
let timeout = Some(Duration::from_secs(10)); async fn js_action_test_deep_dir_escape() {
js_action let js_action = JsProcedure { args: vec![] };
.execute::<serde_json::Value, serde_json::Value>( let path: PathBuf = "test/js_action_execute/"
&path, .parse::<PathBuf>()
&package_id, .unwrap()
&package_version, .canonicalize()
name, .unwrap();
&volumes, let package_id = "test-package".parse().unwrap();
input, let package_version: Version = "0.3.0.3".parse().unwrap();
timeout, let name = ProcedureName::Action("test-deep-dir-escape".parse().unwrap());
ProcessGroupId(0), let volumes: Volumes = serde_json::from_value(json!({
None, "main": {
None, "type": "data"
) },
.await "compat": {
.unwrap() "type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap(); .unwrap();
let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10));
js_action
.execute::<serde_json::Value, serde_json::Value>(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
Arc::new(OsApiMock::default()),
)
.await
.unwrap()
.unwrap();
}
// Runs the JS `test-permission-chown` action end-to-end against the on-disk
// fixture in test/js_action_execute/. The action itself (in the fixture's
// embassy.js) exercises setPermissions/chown/metadata; here we only assert
// that it completes without error.
#[tokio::test]
async fn js_permissions_and_own() {
let js_action = JsProcedure { args: vec![] };
let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>()
.unwrap()
.canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::Action("test-permission-chown".parse().unwrap());
// Volume layout mirrored from the fixture's manifest.
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap();
let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10));
// Double unwrap: outer = execution error, inner = action-level error.
js_action
.execute::<serde_json::Value, serde_json::Value>(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
Arc::new(OsApiMock::default()),
)
.await
.unwrap()
.unwrap();
}
// Runs the JS `test-zero-dir` action end-to-end against the on-disk fixture
// in test/js_action_execute/; asserts only that it completes without error.
#[tokio::test]
async fn js_action_test_zero_dir() {
let js_action = JsProcedure { args: vec![] };
let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>()
.unwrap()
.canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::Action("test-zero-dir".parse().unwrap());
// Volume layout mirrored from the fixture's manifest.
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap();
let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10));
// Double unwrap: outer = execution error, inner = action-level error.
js_action
.execute::<serde_json::Value, serde_json::Value>(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
Arc::new(OsApiMock::default()),
)
.await
.unwrap()
.unwrap();
}
// Runs the JS `test-rsync` action end-to-end against the on-disk fixture in
// test/js_action_execute/; asserts only that it completes without error.
#[tokio::test]
async fn js_rsync() {
let js_action = JsProcedure { args: vec![] };
let path: PathBuf = "test/js_action_execute/"
.parse::<PathBuf>()
.unwrap()
.canonicalize()
.unwrap();
let package_id = "test-package".parse().unwrap();
let package_version: Version = "0.3.0.3".parse().unwrap();
let name = ProcedureName::Action("test-rsync".parse().unwrap());
// Volume layout mirrored from the fixture's manifest.
let volumes: Volumes = serde_json::from_value(json!({
"main": {
"type": "data"
},
"compat": {
"type": "assets"
},
"filebrowser" :{
"package-id": "filebrowser",
"path": "data",
"readonly": true,
"type": "pointer",
"volume-id": "main",
}
}))
.unwrap();
let input: Option<serde_json::Value> = None;
let timeout = Some(Duration::from_secs(10));
// Double unwrap: outer = execution error, inner = action-level error.
js_action
.execute::<serde_json::Value, serde_json::Value>(
&path,
&package_id,
&package_version,
name,
&volumes,
input,
timeout,
ProcessGroupId(0),
None,
Arc::new(OsApiMock::default()),
)
.await
.unwrap()
.unwrap();
}
/// End-to-end test of the callback plumbing: the JS `test-callback` action
/// registers a config callback via `get_service_config`, and a spawned task
/// fires that callback as soon as it appears; the action only completes once
/// the callback has been invoked.
#[tokio::test]
async fn test_callback() {
    let api = Arc::new(OsApiMock::default());
    let action_api = api.clone();
    // Watch the mock's callback list and invoke the first registered callback.
    let spawned = tokio::spawn(async move {
        let mut watching = api.config_callbacks.subscribe();
        loop {
            if watching.borrow().is_empty() {
                watching.changed().await.unwrap();
                continue;
            }
            api.config_callbacks.send_modify(|x| {
                x[0](json!("This is something across the wire!"))
                    // Include the underlying error; the previous closure
                    // ignored `e` entirely (unused variable + useless format).
                    .map_err(|e| format!("Failed call: {e:?}"))
                    .unwrap();
            });
            break;
        }
    });
    let js_action = JsProcedure { args: vec![] };
    let path: PathBuf = "test/js_action_execute/"
        .parse::<PathBuf>()
        .unwrap()
        .canonicalize()
        .unwrap();
    let package_id = "test-package".parse().unwrap();
    let package_version: Version = "0.3.0.3".parse().unwrap();
    let name = ProcedureName::Action("test-callback".parse().unwrap());
    // Volume layout mirrored from the fixture's manifest.
    let volumes: Volumes = serde_json::from_value(json!({
        "main": {
            "type": "data"
        },
        "compat": {
            "type": "assets"
        },
        "filebrowser" :{
            "package-id": "filebrowser",
            "path": "data",
            "readonly": true,
            "type": "pointer",
            "volume-id": "main",
        }
    }))
    .unwrap();
    let input: Option<serde_json::Value> = None;
    let timeout = Some(Duration::from_secs(10));
    js_action
        .execute::<serde_json::Value, serde_json::Value>(
            &path,
            &package_id,
            &package_version,
            name,
            &volumes,
            input,
            timeout,
            ProcessGroupId(0),
            None,
            action_api,
        )
        .await
        .unwrap()
        .unwrap();
    // Ensure the firing task actually ran to completion (i.e. a callback was
    // registered and invoked without error).
    spawned.await.unwrap();
}
} }
#[tokio::test] #[tokio::test]

View File

@@ -730,7 +730,7 @@ export async function setConfig(effects) {
const assert = (condition, message) => { const assert = (condition, message) => {
if (!condition) { if (!condition) {
throw new Error(message); throw ({error: message});
} }
}; };
const ackermann = (m, n) => { const ackermann = (m, n) => {
@@ -1038,6 +1038,93 @@ export const action = {
.catch(() => {}); .catch(() => {});
} }
}, },
/**
* Testing callbacks?
* @param {*} effects
* @param {*} _input
* @returns
*/
async "test-callback"(effects, _input) {
await Promise.race([
new Promise(done => effects.getServiceConfig({serviceId: 'something', configPath: "string", onChange: done})),
new Promise (async () => {
await effects.sleep(100)
throw new Error("Currently in sleeping")
}
)])
return {
result: {
copyable: false,
message: "Done",
version: "0",
qr: false,
},
};
},
/**
 * We wanted to change the permissions and the ownership during backup;
 * there were cases where the ownership was wrong and broke restores for
 * non-root users.
 * Note: the chmod assertion is conditional because chown only succeeds
 * when the test runs as root.
 * @param {*} effects
 * @param {*} _input
 * @returns
 */
async "test-permission-chown"(effects, _input) {
// Start from a clean slate; ignore failure if the dir doesn't exist yet.
await effects
.removeDir({
volumeId: "main",
path: "pem-chown",
})
.catch(() => {});
await effects.createDir({
volumeId: "main",
path: "pem-chown/deep/123",
});
await effects.writeFile({
volumeId: "main",
path: "pem-chown/deep/123/test.txt",
toWrite: "Hello World",
});
// Freshly written file should start out writable.
const firstMetaData = await effects.metadata({
volumeId: 'main',
path: 'pem-chown/deep/123/test.txt',
})
assert(firstMetaData.readonly === false, `The readonly (${firstMetaData.readonly}) is wrong`);
const previousUid = firstMetaData.uid;
const expected = 1234
await effects.setPermissions({
volumeId: 'main',
path: 'pem-chown/deep/123/test.txt',
readonly: true
})
// NOTE(review): despite the name, chownError is true when chown SUCCEEDS
// (it is false when the call rejects, e.g. when not running as root).
const chownError = await effects.chown({
volumeId: 'main',
path: 'pem-chown/deep',
uid: expected
}).then(() => true, () => false)
let metaData = await effects.metadata({
volumeId: 'main',
path: 'pem-chown/deep/123/test.txt',
})
assert(metaData.readonly === true, `The readonly (${metaData.readonly}) is wrong`);
if (chownError) {
// Only check ownership when the chown actually went through.
assert(metaData.uid === expected, `The uuid (${metaData.uid}) is wrong, should be more than ${previousUid}`);
}
return {
result: {
copyable: false,
message: "Done",
version: "0",
qr: false,
},
};
},
async "test-disk-usage"(effects, _input) { async "test-disk-usage"(effects, _input) {
const usage = await effects.diskUsage() const usage = await effects.diskUsage()
@@ -1045,3 +1132,4 @@ export const action = {
} }
}; };

View File

@@ -1,6 +1,7 @@
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use models::Error; use color_eyre::Report;
use models::PackageId; use models::PackageId;
use models::{Error, InterfaceId};
use serde_json::Value; use serde_json::Value;
pub struct RuntimeDropped; pub struct RuntimeDropped;
@@ -13,6 +14,28 @@ fn method_not_available() -> Error {
models::ErrorKind::InvalidRequest, models::ErrorKind::InvalidRequest,
) )
} }
/// Request to bind a package interface to a Tor onion address: which
/// interface, and which external port to expose.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AddressSchemaOnion {
pub id: InterfaceId,
pub external_port: u16,
}
/// Request to bind a package interface to a LAN address: which interface,
/// and which external port to expose.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AddressSchemaLocal {
pub id: InterfaceId,
pub external_port: u16,
}
/// A bound address handed back to the caller, as a plain string newtype.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Address(pub String);
/// Placeholder for a domain entry; carries no data yet — TODO confirm the
/// intended shape before `list_domains` is implemented.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Domain;
/// Placeholder for an allocated name; carries no data yet — TODO confirm the
/// intended shape before the alloc_* methods are implemented.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Name;
#[async_trait::async_trait] #[async_trait::async_trait]
#[allow(unused_variables)] #[allow(unused_variables)]
@@ -22,7 +45,47 @@ pub trait OsApi: Send + Sync + 'static {
id: PackageId, id: PackageId,
path: &str, path: &str,
callback: Callback, callback: Callback,
) -> Result<Value, Error> { ) -> Result<Value, Report>;
Err(method_not_available())
/// Bind an internal port to a LAN address described by `address_schema`.
/// Required: implementors must provide this.
async fn bind_local(
&self,
internal_port: u16,
address_schema: AddressSchemaLocal,
) -> Result<Address, Report>;
/// Bind an internal port to a Tor onion address described by `address_schema`.
/// Required: implementors must provide this.
async fn bind_onion(
&self,
internal_port: u16,
address_schema: AddressSchemaOnion,
) -> Result<Address, Report>;
// The remaining methods have placeholder defaults that panic via `todo!()`
// if called on an implementor that does not override them.
/// Remove a LAN binding for `id` on external port `external`.
async fn unbind_local(&self, id: InterfaceId, external: u16) -> Result<(), Report> {
todo!()
}
/// Remove a Tor (onion) binding for `id` on external port `external`.
async fn unbind_onion(&self, id: InterfaceId, external: u16) -> Result<(), Report> {
todo!()
}
/// List currently bound addresses.
async fn list_address(&self) -> Result<Vec<Address>, Report> {
todo!()
}
/// List known domains.
async fn list_domains(&self) -> Result<Vec<Domain>, Report> {
todo!()
}
/// Allocate an onion name for `id`.
async fn alloc_onion(&self, id: String) -> Result<Name, Report> {
todo!()
}
/// Release an onion name previously allocated for `id`.
async fn dealloc_onion(&self, id: String) -> Result<(), Report> {
todo!()
}
/// Allocate a local (LAN) name for `id`.
async fn alloc_local(&self, id: String) -> Result<Name, Report> {
todo!()
}
/// Release a local name previously allocated for `id`.
async fn dealloc_local(&self, id: String) -> Result<(), Report> {
todo!()
}
/// Allocate a forwarded port for `id`.
async fn alloc_forward(&self, id: String) -> Result<u16, Report> {
todo!()
}
/// Release a forwarded port previously allocated for `id`.
async fn dealloc_forward(&self, id: String) -> Result<(), Report> {
todo!()
} }
} }

View File

@@ -70,6 +70,9 @@ impl Rsync {
for exclude in options.exclude { for exclude in options.exclude {
cmd.arg(format!("--exclude={}", exclude)); cmd.arg(format!("--exclude={}", exclude));
} }
if options.no_permissions {
cmd.arg("--no-perms");
}
let mut command = cmd let mut command = cmd
.arg("-acAXH") .arg("-acAXH")
.arg("--info=progress2") .arg("--info=progress2")

View File

@@ -1,10 +1,24 @@
import Deno from "/deno_global.js"; import Deno from "/deno_global.js";
import * as mainModule from "/embassy.js"; import * as mainModule from "/embassy.js";
// throw new Error("I'm going crasy")
function requireParam(param) { function requireParam(param) {
throw new Error(`Missing required parameter ${param}`); throw new Error(`Missing required parameter ${param}`);
} }
const callbackName = (() => {
let count = 0;
return () => `callback${count++}${Math.floor(Math.random() * 100000)}`;
})();
const callbackMapping = {};
const registerCallback = (fn) => {
const uuid = callbackName(); // TODO
callbackMapping[uuid] = fn;
return uuid;
};
/** /**
* This is using the simplified json pointer spec, using no escapes and arrays * This is using the simplified json pointer spec, using no escapes and arrays
* @param {object} obj * @param {object} obj
@@ -35,42 +49,75 @@ const writeFile = (
) => Deno.core.opAsync("write_file", volumeId, path, toWrite); ) => Deno.core.opAsync("write_file", volumeId, path, toWrite);
const readFile = ( const readFile = (
{ volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), {
volumeId = requireParam("volumeId"),
path = requireParam("path"),
} = requireParam("options"),
) => Deno.core.opAsync("read_file", volumeId, path); ) => Deno.core.opAsync("read_file", volumeId, path);
const runDaemon = ( const runDaemon = (
{ command = requireParam("command"), args = [] } = requireParam("options"), { command = requireParam("command"), args = [] } = requireParam("options"),
) => { ) => {
let id = Deno.core.opAsync("start_command", command, args, "inherit", null); let id = Deno.core.opAsync("start_command", command, args, "inherit", null);
let processId = id.then(x => x.processId) let processId = id.then((x) => x.processId);
let waitPromise = null; let waitPromise = null;
return { return {
processId, processId,
async wait() { async wait() {
waitPromise = waitPromise || Deno.core.opAsync("wait_command", await processId) waitPromise = waitPromise ||
return waitPromise Deno.core.opAsync("wait_command", await processId);
return waitPromise;
}, },
async term(signal = 15) { async term(signal = 15) {
return Deno.core.opAsync("send_signal", await processId, 15) return Deno.core.opAsync("send_signal", await processId, 15);
} },
} };
}; };
const runCommand = async ( const runCommand = async (
{ command = requireParam("command"), args = [], timeoutMillis = 30000 } = requireParam("options"), {
command = requireParam("command"),
args = [],
timeoutMillis = 30000,
} = requireParam("options"),
) => { ) => {
let id = Deno.core.opAsync("start_command", command, args, "collect", timeoutMillis); let id = Deno.core.opAsync(
let pid = id.then(x => x.processId) "start_command",
return Deno.core.opAsync("wait_command", await pid) command,
args,
"collect",
timeoutMillis,
);
let pid = id.then((x) => x.processId);
return Deno.core.opAsync("wait_command", await pid);
}; };
const bindLocal = async (
{
internalPort = requireParam("internalPort"),
name = requireParam("name"),
externalPort = requireParam("externalPort"),
} = requireParam("options"),
) => {
return Deno.core.opAsync("bind_local", internalPort, { name, externalPort });
};
const bindTor = async (
{
internalPort = requireParam("internalPort"),
name = requireParam("name"),
externalPort = requireParam("externalPort"),
} = requireParam("options"),
) => {
return Deno.core.opAsync("bind_onion", internalPort, { name, externalPort });
};
const signalGroup = async ( const signalGroup = async (
{ gid = requireParam("gid"), signal = requireParam("signal") } = requireParam("gid and signal") { gid = requireParam("gid"), signal = requireParam("signal") } = requireParam(
"gid and signal",
),
) => { ) => {
return Deno.core.opAsync("signal_group", gid, signal); return Deno.core.opAsync("signal_group", gid, signal);
}; };
const sleep = (timeMs = requireParam("timeMs"), const sleep = (timeMs = requireParam("timeMs")) =>
) => Deno.core.opAsync("sleep", timeMs); Deno.core.opAsync("sleep", timeMs);
const rename = ( const rename = (
{ {
@@ -81,7 +128,10 @@ const rename = (
} = requireParam("options"), } = requireParam("options"),
) => Deno.core.opAsync("rename", srcVolume, srcPath, dstVolume, dstPath); ) => Deno.core.opAsync("rename", srcVolume, srcPath, dstVolume, dstPath);
const metadata = async ( const metadata = async (
{ volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), {
volumeId = requireParam("volumeId"),
path = requireParam("path"),
} = requireParam("options"),
) => { ) => {
const data = await Deno.core.opAsync("metadata", volumeId, path); const data = await Deno.core.opAsync("metadata", volumeId, path);
return { return {
@@ -92,7 +142,10 @@ const metadata = async (
}; };
}; };
const removeFile = ( const removeFile = (
{ volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), {
volumeId = requireParam("volumeId"),
path = requireParam("path"),
} = requireParam("options"),
) => Deno.core.opAsync("remove_file", volumeId, path); ) => Deno.core.opAsync("remove_file", volumeId, path);
const isSandboxed = () => Deno.core.opSync("is_sandboxed"); const isSandboxed = () => Deno.core.opSync("is_sandboxed");
@@ -129,24 +182,38 @@ const chmod = async (
return await Deno.core.opAsync("chmod", volumeId, path, mode); return await Deno.core.opAsync("chmod", volumeId, path, mode);
}; };
const readJsonFile = async ( const readJsonFile = async (
{ volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), {
volumeId = requireParam("volumeId"),
path = requireParam("path"),
} = requireParam("options"),
) => JSON.parse(await readFile({ volumeId, path })); ) => JSON.parse(await readFile({ volumeId, path }));
const createDir = ( const createDir = (
{ volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), {
volumeId = requireParam("volumeId"),
path = requireParam("path"),
} = requireParam("options"),
) => Deno.core.opAsync("create_dir", volumeId, path); ) => Deno.core.opAsync("create_dir", volumeId, path);
const readDir = ( const readDir = (
{ volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), { volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"),
) => Deno.core.opAsync("read_dir", volumeId, path); ) => Deno.core.opAsync("read_dir", volumeId, path);
const removeDir = ( const removeDir = (
{ volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), {
volumeId = requireParam("volumeId"),
path = requireParam("path"),
} = requireParam("options"),
) => Deno.core.opAsync("remove_dir", volumeId, path); ) => Deno.core.opAsync("remove_dir", volumeId, path);
const trace = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_trace", whatToTrace); const trace = (whatToTrace = requireParam("whatToTrace")) =>
const warn = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_warn", whatToTrace); Deno.core.opAsync("log_trace", whatToTrace);
const error = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_error", whatToTrace); const warn = (whatToTrace = requireParam("whatToTrace")) =>
const debug = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_debug", whatToTrace); Deno.core.opAsync("log_warn", whatToTrace);
const info = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_info", whatToTrace); const error = (whatToTrace = requireParam("whatToTrace")) =>
const fetch = async (url = requireParam ('url'), options = null) => { Deno.core.opAsync("log_error", whatToTrace);
const debug = (whatToTrace = requireParam("whatToTrace")) =>
Deno.core.opAsync("log_debug", whatToTrace);
const info = (whatToTrace = requireParam("whatToTrace")) =>
Deno.core.opAsync("log_info", whatToTrace);
const fetch = async (url = requireParam("url"), options = null) => {
const { body, ...response } = await Deno.core.opAsync("fetch", url, options); const { body, ...response } = await Deno.core.opAsync("fetch", url, options);
const textValue = Promise.resolve(body); const textValue = Promise.resolve(body);
return { return {
@@ -161,28 +228,35 @@ const fetch = async (url = requireParam ('url'), options = null) => {
}; };
const runRsync = ( const runRsync = (
{ {
srcVolume = requireParam("srcVolume"), srcVolume = requireParam("srcVolume"),
dstVolume = requireParam("dstVolume"), dstVolume = requireParam("dstVolume"),
srcPath = requireParam("srcPath"), srcPath = requireParam("srcPath"),
dstPath = requireParam("dstPath"), dstPath = requireParam("dstPath"),
options = requireParam("options"), options = requireParam("options"),
} = requireParam("options"), } = requireParam("options"),
) => { ) => {
let id = Deno.core.opAsync("rsync", srcVolume, srcPath, dstVolume, dstPath, options); let id = Deno.core.opAsync(
"rsync",
srcVolume,
srcPath,
dstVolume,
dstPath,
options,
);
let waitPromise = null; let waitPromise = null;
return { return {
async id() { async id() {
return id return id;
}, },
async wait() { async wait() {
waitPromise = waitPromise || Deno.core.opAsync("rsync_wait", await id) waitPromise = waitPromise || Deno.core.opAsync("rsync_wait", await id);
return waitPromise return waitPromise;
}, },
async progress() { async progress() {
return Deno.core.opAsync("rsync_progress", await id) return Deno.core.opAsync("rsync_progress", await id);
} },
} };
}; };
const diskUsage = async ({ const diskUsage = async ({
@@ -193,63 +267,105 @@ const diskUsage = async ({
return { used, total } return { used, total }
} }
const callbackMapping = {} globalThis.runCallback = (uuid, data) => callbackMapping[uuid](data);
const registerCallback = (fn) => { // window.runCallback = runCallback;
const uuid = generateUuid(); // TODO // Deno.runCallback = runCallback;
callbackMapping[uuid] = fn;
return uuid
}
const runCallback = (uuid, data) => callbackMapping[uuid](data)
const getServiceConfig = async (serviceId, configPath, onChange) => { const getServiceConfig = async (
await Deno.core.opAsync("get_service_config", serviceId, configPath, registerCallback(onChange)) {
} serviceId = requireParam("serviceId"),
configPath = requireParam("configPath"),
onChange = requireParam("onChange"),
} = requireParam("options"),
) => {
return await Deno.core.opAsync(
"get_service_config",
serviceId,
configPath,
registerCallback(onChange),
);
};
const setPermissions = async (
{
volumeId = requireParam("volumeId"),
path = requireParam("path"),
readonly = requireParam("readonly"),
} = requireParam("options"),
) => {
return await Deno.core.opAsync("set_permissions", volumeId, path, readonly);
};
const currentFunction = Deno.core.opSync("current_function"); const currentFunction = Deno.core.opSync("current_function");
const input = Deno.core.opSync("get_input"); const input = Deno.core.opSync("get_input");
const variable_args = Deno.core.opSync("get_variable_args"); const variable_args = Deno.core.opSync("get_variable_args");
const setState = (x) => Deno.core.opSync("set_value", x); const setState = (x) => Deno.core.opAsync("set_value", x);
const effects = { const effects = {
bindLocal,
bindTor,
chmod, chmod,
chown, chown,
writeFile, createDir,
readFile,
writeJsonFile,
readJsonFile,
error,
warn,
debug, debug,
trace, diskUsage,
error,
fetch,
getServiceConfig,
getServiceConfig,
info, info,
isSandboxed, isSandboxed,
fetch,
removeFile,
createDir,
removeDir,
metadata, metadata,
readDir,
readFile,
readJsonFile,
removeDir,
removeFile,
rename, rename,
runCommand, runCommand,
sleep,
runDaemon, runDaemon,
signalGroup,
runRsync, runRsync,
readDir, setPermissions,
diskUsage, signalGroup,
getServiceConfig, sleep,
trace,
warn,
writeFile,
writeJsonFile,
}; };
const defaults = { const defaults = {
"handleSignal": (effects, { gid, signal }) => { handleSignal: (effects, { gid, signal }) => {
return effects.signalGroup({ gid, signal }) return effects.signalGroup({ gid, signal });
},
};
function safeToString(fn, orValue = "") {
try {
return fn();
} catch (e) {
return orValue;
} }
} }
const runFunction = jsonPointerValue(mainModule, currentFunction) || jsonPointerValue(defaults, currentFunction); const runFunction = jsonPointerValue(mainModule, currentFunction) ||
jsonPointerValue(defaults, currentFunction);
(async () => { (async () => {
if (typeof runFunction !== "function") { const answer = await (async () => {
error(`Expecting ${currentFunction} to be a function`); if (typeof runFunction !== "function") {
throw new Error(`Expecting ${currentFunction} to be a function`); error(`Expecting ${currentFunction} to be a function`);
} throw new Error(`Expecting ${currentFunction} to be a function`);
const answer = await runFunction(effects, input, ...variable_args); }
setState(answer); })()
.then(() => runFunction(effects, input, ...variable_args))
.catch((e) => {
if ("error" in e) return e;
if ("error-code" in e) return e;
return {
error: safeToString(
() => e.toString(),
"Error Not able to be stringified",
),
};
});
await setState(answer);
})(); })();

View File

@@ -109,8 +109,15 @@ enum ResultType {
ErrorCode(i32, String), ErrorCode(i32, String),
Result(serde_json::Value), Result(serde_json::Value),
} }
#[derive(Clone, Default)] #[derive(Clone)]
struct AnswerState(std::sync::Arc<deno_core::parking_lot::Mutex<Value>>); struct AnswerState(mpsc::Sender<Value>);
impl AnswerState {
fn new() -> (Self, mpsc::Receiver<Value>) {
let (send, recv) = mpsc::channel(1);
(Self(send), recv)
}
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct ModsLoader { struct ModsLoader {
@@ -282,6 +289,9 @@ impl JsExecutionEnvironment {
vec![ vec![
fns::chown::decl(), fns::chown::decl(),
fns::chmod::decl(), fns::chmod::decl(),
fns::bind_local::decl(),
fns::bind_onion::decl(),
fns::chown::decl(),
fns::fetch::decl(), fns::fetch::decl(),
fns::read_file::decl(), fns::read_file::decl(),
fns::metadata::decl(), fns::metadata::decl(),
@@ -306,6 +316,7 @@ impl JsExecutionEnvironment {
fns::wait_command::decl(), fns::wait_command::decl(),
fns::sleep::decl(), fns::sleep::decl(),
fns::send_signal::decl(), fns::send_signal::decl(),
fns::set_permissions::decl(),
fns::signal_group::decl(), fns::signal_group::decl(),
fns::rsync::decl(), fns::rsync::decl(),
fns::rsync_wait::decl(), fns::rsync_wait::decl(),
@@ -321,7 +332,7 @@ impl JsExecutionEnvironment {
variable_args: Vec<serde_json::Value>, variable_args: Vec<serde_json::Value>,
) -> Result<Value, (JsError, String)> { ) -> Result<Value, (JsError, String)> {
let base_directory = self.base_directory.clone(); let base_directory = self.base_directory.clone();
let answer_state = AnswerState::default(); let (answer_state, mut receive_answer) = AnswerState::new();
let ext_answer_state = answer_state.clone(); let ext_answer_state = answer_state.clone();
let (callback_sender, callback_receiver) = mpsc::unbounded_channel(); let (callback_sender, callback_receiver) = mpsc::unbounded_channel();
let js_ctx = JsContext { let js_ctx = JsContext {
@@ -376,12 +387,18 @@ impl JsExecutionEnvironment {
Ok::<_, AnyError>(()) Ok::<_, AnyError>(())
}; };
future.await.map_err(|e| { let answer = tokio::select! {
tracing::debug!("{:?}", e); Some(x) = receive_answer.recv() => x,
(JsError::Javascript, format!("{}", e)) _ = future => {
})?; if let Some(x) = receive_answer.recv().await {
x
}
else {
serde_json::json!({"error": "JS Engine Shutdown"})
}
},
let answer = answer_state.0.lock().clone(); };
Ok(answer) Ok(answer)
} }
} }
@@ -399,10 +416,10 @@ impl<'a> Future for RuntimeEventLoop<'a> {
) -> std::task::Poll<Self::Output> { ) -> std::task::Poll<Self::Output> {
let this = self.project(); let this = self.project();
if let Poll::Ready(Some((uuid, value))) = this.callback.poll_recv(cx) { if let Poll::Ready(Some((uuid, value))) = this.callback.poll_recv(cx) {
match this match this.runtime.execute_script(
.runtime "callback",
.execute_script("callback", &format!("runCallback({uuid}, {value})")) &format!("globalThis.runCallback(\"{uuid}\", {value})"),
{ ) {
Ok(_) => (), Ok(_) => (),
Err(e) => return Poll::Ready(Err(e)), Err(e) => return Poll::Ready(Err(e)),
} }
@@ -440,7 +457,10 @@ mod fns {
OutputParams, OutputStrategy, ProcessGroupId, ProcessId, RunCommand, RunCommandParams, OutputParams, OutputStrategy, ProcessGroupId, ProcessId, RunCommand, RunCommandParams,
SendSignal, SendSignalParams, SignalGroup, SignalGroupParams, SendSignal, SendSignalParams, SignalGroup, SignalGroupParams,
}; };
use helpers::{to_tmp_path, AtomicFile, Rsync, RsyncOptions, RuntimeDropped}; use helpers::{
to_tmp_path, AddressSchemaLocal, AddressSchemaOnion, AtomicFile, Rsync, RsyncOptions,
RuntimeDropped,
};
use itertools::Itertools; use itertools::Itertools;
use models::{PackageId, VolumeId}; use models::{PackageId, VolumeId};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@@ -751,7 +771,7 @@ mod fns {
volume_path.to_string_lossy(), volume_path.to_string_lossy(),
); );
} }
if let Err(_) = tokio::fs::metadata(&src).await { if tokio::fs::metadata(&src).await.is_err() {
bail!("Source at {} does not exists", src.to_string_lossy()); bail!("Source at {} does not exists", src.to_string_lossy());
} }
@@ -813,6 +833,39 @@ mod fns {
Ok(progress) Ok(progress)
} }
#[op] #[op]
async fn set_permissions(
state: Rc<RefCell<OpState>>,
volume_id: VolumeId,
path_in: PathBuf,
readonly: bool,
) -> Result<(), AnyError> {
let (volumes, volume_path) = {
let state = state.borrow();
let ctx: &JsContext = state.borrow();
let volume_path = ctx
.volumes
.path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id)
.ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))?;
(ctx.volumes.clone(), volume_path)
};
if volumes.readonly(&volume_id) {
bail!("Volume {} is readonly", volume_id);
}
let new_file = volume_path.join(path_in);
// With the volume check
if !is_subset(&volume_path, &new_file).await? {
bail!(
"Path '{}' has broken away from parent '{}'",
new_file.to_string_lossy(),
volume_path.to_string_lossy(),
);
}
let mut perms = tokio::fs::metadata(&new_file).await?.permissions();
perms.set_readonly(readonly);
tokio::fs::set_permissions(new_file, perms).await?;
Ok(())
}
#[op]
async fn remove_file( async fn remove_file(
state: Rc<RefCell<OpState>>, state: Rc<RefCell<OpState>>,
volume_id: VolumeId, volume_id: VolumeId,
@@ -1039,8 +1092,10 @@ mod fns {
#[op] #[op]
async fn log_trace(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> { async fn log_trace(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> {
let state = state.borrow(); let ctx = {
let ctx = state.borrow::<JsContext>().clone(); let state = state.borrow();
state.borrow::<JsContext>().clone()
};
if let Some(rpc_client) = ctx.container_rpc_client { if let Some(rpc_client) = ctx.container_rpc_client {
return rpc_client return rpc_client
.request( .request(
@@ -1063,8 +1118,10 @@ mod fns {
} }
#[op] #[op]
async fn log_warn(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> { async fn log_warn(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> {
let state = state.borrow(); let ctx = {
let ctx = state.borrow::<JsContext>().clone(); let state = state.borrow();
state.borrow::<JsContext>().clone()
};
if let Some(rpc_client) = ctx.container_rpc_client { if let Some(rpc_client) = ctx.container_rpc_client {
return rpc_client return rpc_client
.request( .request(
@@ -1087,8 +1144,10 @@ mod fns {
} }
#[op] #[op]
async fn log_error(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> { async fn log_error(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> {
let state = state.borrow(); let ctx = {
let ctx = state.borrow::<JsContext>().clone(); let state = state.borrow();
state.borrow::<JsContext>().clone()
};
if let Some(rpc_client) = ctx.container_rpc_client { if let Some(rpc_client) = ctx.container_rpc_client {
return rpc_client return rpc_client
.request( .request(
@@ -1111,8 +1170,10 @@ mod fns {
} }
#[op] #[op]
async fn log_debug(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> { async fn log_debug(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> {
let state = state.borrow(); let ctx = {
let ctx = state.borrow::<JsContext>().clone(); let state = state.borrow();
state.borrow::<JsContext>().clone()
};
if let Some(rpc_client) = ctx.container_rpc_client { if let Some(rpc_client) = ctx.container_rpc_client {
return rpc_client return rpc_client
.request( .request(
@@ -1135,8 +1196,10 @@ mod fns {
} }
#[op] #[op]
async fn log_info(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> { async fn log_info(state: Rc<RefCell<OpState>>, input: String) -> Result<(), AnyError> {
let state = state.borrow(); let ctx = {
let ctx = state.borrow::<JsContext>().clone(); let state = state.borrow();
state.borrow::<JsContext>().clone()
};
if let Some(rpc_client) = ctx.container_rpc_client { if let Some(rpc_client) = ctx.container_rpc_client {
return rpc_client return rpc_client
.request( .request(
@@ -1169,9 +1232,16 @@ mod fns {
Ok(ctx.variable_args.clone()) Ok(ctx.variable_args.clone())
} }
#[op] #[op]
fn set_value(state: &mut OpState, value: Value) -> Result<(), AnyError> { async fn set_value(state: Rc<RefCell<OpState>>, value: Value) -> Result<(), AnyError> {
let mut answer = state.borrow::<AnswerState>().0.lock(); let sender = {
*answer = value; let state = state.borrow();
let answer_state = state.borrow::<AnswerState>().0.clone();
answer_state
};
sender
.send(value)
.await
.map_err(|_e| anyhow!("Could not set a value"))?;
Ok(()) Ok(())
} }
#[op] #[op]
@@ -1424,28 +1494,54 @@ mod fns {
service_id: PackageId, service_id: PackageId,
path: String, path: String,
callback: String, callback: String,
) -> Result<ResultType, AnyError> { ) -> Result<Value, AnyError> {
let state = state.borrow(); let (sender, os) = {
let ctx = state.borrow::<JsContext>(); let state = state.borrow();
let sender = ctx.callback_sender.clone(); let ctx = state.borrow::<JsContext>();
Ok( (ctx.callback_sender.clone(), ctx.os.clone())
match ctx };
.os os.get_service_config(
.get_service_config( service_id,
service_id, &path,
&path, Box::new(move |value| {
Box::new(move |value| { sender
sender .send((callback.clone(), value))
.send((callback.clone(), value)) .map_err(|_| RuntimeDropped)
.map_err(|_| RuntimeDropped) }),
}),
)
.await
{
Ok(a) => ResultType::Result(a),
Err(e) => ResultType::ErrorCode(e.kind as i32, e.source.to_string()),
},
) )
.await
.map_err(|e| anyhow!("Couldn't get service config: {e:?}"))
}
#[op]
async fn bind_onion(
state: Rc<RefCell<OpState>>,
internal_port: u16,
address_schema: AddressSchemaOnion,
) -> Result<helpers::Address, AnyError> {
let os = {
let state = state.borrow();
let ctx = state.borrow::<JsContext>();
ctx.os.clone()
};
os.bind_onion(internal_port, address_schema)
.await
.map_err(|e| anyhow!("{e:?}"))
}
#[op]
async fn bind_local(
state: Rc<RefCell<OpState>>,
internal_port: u16,
address_schema: AddressSchemaLocal,
) -> Result<helpers::Address, AnyError> {
let os = {
let state = state.borrow();
let ctx = state.borrow::<JsContext>();
ctx.os.clone()
};
os.bind_local(internal_port, address_schema)
.await
.map_err(|e| anyhow!("{e:?}"))
} }
/// We need to make sure that during the file accessing, we don't reach beyond our scope of control /// We need to make sure that during the file accessing, we don't reach beyond our scope of control