Feature/registry package index (#2623)

* include system images in compat s9pk

* wip

* wip

* update types

* wip

* fix signature serialization

* Add SignatureHeader conversions

* finish display impl for get

---------

Co-authored-by: Shadowy Super Coder <musashidisciple@proton.me>
This commit is contained in:
Aiden McClelland
2024-05-31 12:13:23 -06:00
committed by GitHub
parent 0ccbb52c1f
commit fd7c2fbe93
113 changed files with 3265 additions and 1436 deletions

View File

@@ -140,7 +140,6 @@ install: $(ALL_TARGETS)
$(call mkdir,$(DESTDIR)/usr/lib/startos/system-images)
$(call cp,system-images/compat/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/compat.tar)
$(call cp,system-images/utils/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/utils.tar)
$(call cp,system-images/binfmt/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/binfmt.tar)
$(call cp,firmware/$(PLATFORM),$(DESTDIR)/usr/lib/startos/firmware)
@@ -184,7 +183,7 @@ container-runtime/node_modules: container-runtime/package.json container-runtime
npm --prefix container-runtime ci
touch container-runtime/node_modules
sdk/lib/osBindings: $(shell core/startos/bindings)
sdk/lib/osBindings: core/startos/bindings
mkdir -p sdk/lib/osBindings
ls core/startos/bindings/*.ts | sed 's/core\/startos\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' > core/startos/bindings/index.ts
npm --prefix sdk exec -- prettier --config ./sdk/package.json -w ./core/startos/bindings/*.ts

13
core/Cargo.lock generated
View File

@@ -1153,10 +1153,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0"
dependencies = [
"const-oid",
"der_derive",
"pem-rfc7468",
"zeroize",
]
[[package]]
name = "der_derive"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fe87ce4529967e0ba1dcf8450bab64d97dfd5010a6256187ffe2e43e6f0e049"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.60",
]
[[package]]
name = "deranged"
version = "0.3.11"
@@ -4603,6 +4615,7 @@ dependencies = [
"cookie 0.18.1",
"cookie_store",
"current_platform",
"der",
"digest 0.10.7",
"divrem",
"ed25519 2.2.3",

View File

@@ -1,10 +1,14 @@
use std::path::{Path, PathBuf};
use models::{PackageId, Version};
use models::{PackageId, VersionString};
pub const PKG_SCRIPT_DIR: &str = "package-data/scripts";
pub fn script_dir<P: AsRef<Path>>(datadir: P, pkg_id: &PackageId, version: &Version) -> PathBuf {
pub fn script_dir<P: AsRef<Path>>(
datadir: P,
pkg_id: &PackageId,
version: &VersionString,
) -> PathBuf {
datadir
.as_ref()
.join(&*PKG_SCRIPT_DIR)

View File

@@ -89,6 +89,7 @@ pub enum ErrorKind {
Timeout = 71,
Lxc = 72,
Cancelled = 73,
Git = 74,
}
impl ErrorKind {
pub fn as_str(&self) -> &'static str {
@@ -167,6 +168,7 @@ impl ErrorKind {
Timeout => "Timeout Error",
Lxc => "LXC Error",
Cancelled => "Cancelled",
Git => "Git Error",
}
}
}

View File

@@ -5,7 +5,7 @@ use std::str::FromStr;
use serde::{Deserialize, Deserializer, Serialize};
use ts_rs::TS;
use crate::{Id, InvalidId, PackageId, Version};
use crate::{Id, InvalidId, PackageId, VersionString};
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)]
#[ts(type = "string")]
@@ -21,7 +21,7 @@ impl std::fmt::Display for ImageId {
}
}
impl ImageId {
pub fn for_package(&self, pkg_id: &PackageId, pkg_version: Option<&Version>) -> String {
pub fn for_package(&self, pkg_id: &PackageId, pkg_version: Option<&VersionString>) -> String {
format!(
"start9/{}/{}:{}",
pkg_id,

View File

@@ -6,12 +6,12 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer};
use ts_rs::TS;
#[derive(Debug, Clone, TS)]
#[ts(type = "string")]
pub struct Version {
#[ts(type = "string", rename = "Version")]
pub struct VersionString {
version: emver::Version,
string: String,
}
impl Version {
impl VersionString {
pub fn as_str(&self) -> &str {
self.string.as_str()
}
@@ -19,76 +19,76 @@ impl Version {
self.version
}
}
impl std::fmt::Display for Version {
impl std::fmt::Display for VersionString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.string)
}
}
impl std::str::FromStr for Version {
impl std::str::FromStr for VersionString {
type Err = <emver::Version as FromStr>::Err;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Version {
Ok(VersionString {
string: s.to_owned(),
version: s.parse()?,
})
}
}
impl From<emver::Version> for Version {
impl From<emver::Version> for VersionString {
fn from(v: emver::Version) -> Self {
Version {
VersionString {
string: v.to_string(),
version: v,
}
}
}
impl From<Version> for emver::Version {
fn from(v: Version) -> Self {
impl From<VersionString> for emver::Version {
fn from(v: VersionString) -> Self {
v.version
}
}
impl Default for Version {
impl Default for VersionString {
fn default() -> Self {
Self::from(emver::Version::default())
}
}
impl Deref for Version {
impl Deref for VersionString {
type Target = emver::Version;
fn deref(&self) -> &Self::Target {
&self.version
}
}
impl AsRef<emver::Version> for Version {
impl AsRef<emver::Version> for VersionString {
fn as_ref(&self) -> &emver::Version {
&self.version
}
}
impl AsRef<str> for Version {
impl AsRef<str> for VersionString {
fn as_ref(&self) -> &str {
self.as_str()
}
}
impl PartialEq for Version {
fn eq(&self, other: &Version) -> bool {
impl PartialEq for VersionString {
fn eq(&self, other: &VersionString) -> bool {
self.version.eq(&other.version)
}
}
impl Eq for Version {}
impl PartialOrd for Version {
impl Eq for VersionString {}
impl PartialOrd for VersionString {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.version.partial_cmp(&other.version)
}
}
impl Ord for Version {
impl Ord for VersionString {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.version.cmp(&other.version)
}
}
impl Hash for Version {
impl Hash for VersionString {
fn hash<H: Hasher>(&self, state: &mut H) {
self.version.hash(state)
}
}
impl<'de> Deserialize<'de> for Version {
impl<'de> Deserialize<'de> for VersionString {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
@@ -98,7 +98,7 @@ impl<'de> Deserialize<'de> for Version {
Ok(Self { string, version })
}
}
impl Serialize for Version {
impl Serialize for VersionString {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,

View File

@@ -73,6 +73,7 @@ console-subscriber = { version = "0.2", optional = true }
cookie = "0.18.0"
cookie_store = "0.20.0"
current_platform = "0.2.0"
der = { version = "0.7.9", features = ["derive", "pem"] }
digest = "0.10.7"
divrem = "1.0.0"
ed25519 = { version = "2.2.3", features = ["pkcs8", "pem", "alloc"] }

View File

@@ -29,7 +29,7 @@ use crate::util::clap::FromStrParser;
use crate::util::serde::{
deserialize_from_str, display_serializable, serialize_display, HandlerExtSerde, WithIoFormat,
};
use crate::util::Version;
use crate::util::VersionString;
pub mod cifs;
@@ -194,7 +194,7 @@ pub async fn list(ctx: RpcContext) -> Result<BTreeMap<BackupTargetId, BackupTarg
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BackupInfo {
pub version: Version,
pub version: VersionString,
pub timestamp: Option<DateTime<Utc>>,
pub package_backups: BTreeMap<PackageId, PackageBackupInfo>,
}
@@ -203,8 +203,8 @@ pub struct BackupInfo {
#[serde(rename_all = "camelCase")]
pub struct PackageBackupInfo {
pub title: String,
pub version: Version,
pub os_version: Version,
pub version: VersionString,
pub os_version: VersionString,
pub timestamp: DateTime<Utc>,
}

View File

@@ -21,7 +21,7 @@ use crate::context::config::{local_config_path, ClientConfig};
use crate::context::{DiagnosticContext, InstallContext, RpcContext, SetupContext};
use crate::middleware::auth::LOCAL_AUTH_COOKIE_PATH;
use crate::prelude::*;
use crate::rpc_continuations::RequestGuid;
use crate::rpc_continuations::Guid;
#[derive(Debug)]
pub struct CliContextSeed {
@@ -164,7 +164,7 @@ impl CliContext {
pub async fn ws_continuation(
&self,
guid: RequestGuid,
guid: Guid,
) -> Result<WebSocketStream<MaybeTlsStream<TcpStream>>, Error> {
let mut url = self.base_url.clone();
let ws_scheme = match url.scheme() {
@@ -194,7 +194,7 @@ impl CliContext {
pub async fn rest_continuation(
&self,
guid: RequestGuid,
guid: Guid,
body: reqwest::Body,
headers: reqwest::header::HeaderMap,
) -> Result<reqwest::Response, Error> {

View File

@@ -21,7 +21,7 @@ use crate::net::utils::{get_iface_ipv4_addr, get_iface_ipv6_addr};
use crate::prelude::*;
use crate::progress::FullProgress;
use crate::util::cpupower::Governor;
use crate::util::Version;
use crate::util::VersionString;
use crate::version::{Current, VersionT};
use crate::{ARCH, PLATFORM};
@@ -109,8 +109,7 @@ pub struct ServerInfo {
pub platform: InternedString,
pub id: String,
pub hostname: String,
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
#[ts(type = "string | null")]
pub last_backup: Option<DateTime<Utc>>,
#[ts(type = "string")]
@@ -136,7 +135,7 @@ pub struct ServerInfo {
#[serde(default)]
pub zram: bool,
pub governor: Option<Governor>,
pub smtp: Option<String>
pub smtp: Option<String>,
}
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]

View File

@@ -20,7 +20,7 @@ use super::mount::guard::TmpMountGuard;
use crate::disk::mount::guard::GenericMountGuard;
use crate::disk::OsPartitionInfo;
use crate::util::serde::IoFormat;
use crate::util::{Invoke, Version};
use crate::util::{Invoke, VersionString};
use crate::{Error, ResultExt as _};
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
@@ -56,7 +56,7 @@ pub struct PartitionInfo {
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct EmbassyOsRecoveryInfo {
pub version: Version,
pub version: VersionString,
pub full: bool,
pub password_hash: Option<String>,
pub wrapped_key: Option<String>,

View File

@@ -1,4 +1,5 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use clap::builder::ValueParserFactory;
@@ -22,7 +23,7 @@ use crate::context::{CliContext, RpcContext};
use crate::db::model::package::{ManifestPreference, PackageState, PackageStateMatchModelRef};
use crate::prelude::*;
use crate::progress::{FullProgress, PhasedProgressBar};
use crate::rpc_continuations::{RequestGuid, RpcContinuation};
use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::s9pk::manifest::PackageId;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::S9pk;
@@ -139,15 +140,18 @@ pub async fn install(
let registry = registry.unwrap_or_else(|| crate::DEFAULT_MARKETPLACE.parse().unwrap());
let version_priority = version_priority.unwrap_or_default();
let s9pk = S9pk::deserialize(
&HttpSource::new(
ctx.client.clone(),
format!(
"{}/package/v0/{}.s9pk?spec={}&version-priority={}",
registry, id, version, version_priority,
&Arc::new(
HttpSource::new(
ctx.client.clone(),
format!(
"{}/package/v0/{}.s9pk?spec={}&version-priority={}",
registry, id, version, version_priority,
)
.parse()?,
)
.parse()?,
)
.await?,
.await?,
),
None, // TODO
true,
)
.await?;
@@ -170,8 +174,8 @@ pub async fn install(
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SideloadResponse {
pub upload: RequestGuid,
pub progress: RequestGuid,
pub upload: Guid,
pub progress: Guid,
}
#[instrument(skip_all)]
@@ -179,7 +183,7 @@ pub async fn sideload(ctx: RpcContext) -> Result<SideloadResponse, Error> {
let (upload, file) = upload(&ctx).await?;
let (id_send, id_recv) = oneshot::channel();
let (err_send, err_recv) = oneshot::channel();
let progress = RequestGuid::new();
let progress = Guid::new();
let db = ctx.db.clone();
let mut sub = db
.subscribe(
@@ -256,7 +260,11 @@ pub async fn sideload(ctx: RpcContext) -> Result<SideloadResponse, Error> {
.await;
tokio::spawn(async move {
if let Err(e) = async {
let s9pk = S9pk::deserialize(&file, true).await?;
let s9pk = S9pk::deserialize(
&file, None, // TODO
true,
)
.await?;
let _ = id_send.send(s9pk.as_manifest().id.clone());
ctx.services
.install(ctx.clone(), s9pk, None::<Never>)

View File

@@ -1,6 +1,8 @@
pub const DEFAULT_MARKETPLACE: &str = "https://registry.start9.com";
// pub const COMMUNITY_MARKETPLACE: &str = "https://community-registry.start9.com";
pub const BUFFER_SIZE: usize = 1024;
pub const CAP_1_KiB: usize = 1024;
pub const CAP_1_MiB: usize = CAP_1_KiB * CAP_1_KiB;
pub const CAP_10_MiB: usize = 10 * CAP_1_MiB;
pub const HOST_IP: [u8; 4] = [172, 18, 0, 1];
pub const TARGET: &str = current_platform::CURRENT_PLATFORM;
lazy_static::lazy_static! {

View File

@@ -26,7 +26,7 @@ use crate::context::{CliContext, RpcContext};
use crate::error::ResultExt;
use crate::lxc::ContainerId;
use crate::prelude::*;
use crate::rpc_continuations::{RequestGuid, RpcContinuation, RpcContinuations};
use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations};
use crate::util::serde::Reversible;
use crate::util::Invoke;
@@ -118,7 +118,7 @@ pub struct LogResponse {
#[serde(rename_all = "camelCase")]
pub struct LogFollowResponse {
start_cursor: Option<String>,
guid: RequestGuid,
guid: Guid,
}
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
@@ -581,7 +581,7 @@ pub async fn follow_logs<Context: AsRef<RpcContinuations>>(
first_entry = Some(entry);
}
let guid = RequestGuid::new();
let guid = Guid::new();
ctx.as_ref()
.add(
guid.clone(),

View File

@@ -33,7 +33,7 @@ use crate::disk::mount::filesystem::{MountType, ReadWrite};
use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard};
use crate::disk::mount::util::unmount;
use crate::prelude::*;
use crate::rpc_continuations::{RequestGuid, RpcContinuation};
use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::util::clap::FromStrParser;
use crate::util::rpc_client::UnixRpcClient;
use crate::util::{new_guid, Invoke};
@@ -433,7 +433,7 @@ pub struct ConnectParams {
pub async fn connect_rpc(
ctx: RpcContext,
ConnectParams { guid }: ConnectParams,
) -> Result<RequestGuid, Error> {
) -> Result<Guid, Error> {
connect(
&ctx,
ctx.dev.lxc.lock().await.get(&guid).ok_or_else(|| {
@@ -443,11 +443,11 @@ pub async fn connect_rpc(
.await
}
pub async fn connect(ctx: &RpcContext, container: &LxcContainer) -> Result<RequestGuid, Error> {
pub async fn connect(ctx: &RpcContext, container: &LxcContainer) -> Result<Guid, Error> {
use axum::extract::ws::Message;
let rpc = container.connect_rpc(Some(Duration::from_secs(30))).await?;
let guid = RequestGuid::new();
let guid = Guid::new();
ctx.rpc_continuations
.add(
guid.clone(),
@@ -504,7 +504,7 @@ pub async fn connect(ctx: &RpcContext, container: &LxcContainer) -> Result<Reque
Ok(guid)
}
pub async fn connect_cli(ctx: &CliContext, guid: RequestGuid) -> Result<(), Error> {
pub async fn connect_cli(ctx: &CliContext, guid: Guid) -> Result<(), Error> {
use futures::SinkExt;
use tokio_tungstenite::tungstenite::Message;

View File

@@ -30,7 +30,7 @@ use crate::middleware::auth::{Auth, HasValidSession};
use crate::middleware::cors::Cors;
use crate::middleware::db::SyncDb;
use crate::middleware::diagnostic::DiagnosticMode;
use crate::rpc_continuations::RequestGuid;
use crate::rpc_continuations::Guid;
use crate::{diagnostic_api, install_api, main_api, setup_api, Error, ErrorKind, ResultExt};
const NOT_FOUND: &[u8] = b"Not Found";
@@ -136,7 +136,7 @@ pub fn main_ui_server_router(ctx: RpcContext) -> Router {
let ctx = ctx.clone();
move |x::Path(path): x::Path<String>,
ws: axum::extract::ws::WebSocketUpgrade| async move {
match RequestGuid::from(&path) {
match Guid::from(&path) {
None => {
tracing::debug!("No Guid Path");
bad_request()
@@ -159,7 +159,7 @@ pub fn main_ui_server_router(ctx: RpcContext) -> Router {
.path()
.strip_prefix("/rest/rpc/")
.unwrap_or_default();
match RequestGuid::from(&path) {
match Guid::from(&path) {
None => {
tracing::debug!("No Guid Path");
bad_request()

View File

@@ -10,10 +10,11 @@ use ts_rs::TS;
use crate::context::CliContext;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::signer::{ContactInfo, SignerInfo, SignerKey};
use crate::registry::signer::sign::AnyVerifyingKey;
use crate::registry::signer::{ContactInfo, SignerInfo};
use crate::registry::RegistryDatabase;
use crate::rpc_continuations::RequestGuid;
use crate::util::serde::{display_serializable, HandlerExtSerde, Pem, WithIoFormat};
use crate::rpc_continuations::Guid;
use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat};
pub fn admin_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
@@ -48,8 +49,8 @@ fn signers_api<C: Context>() -> ParentHandler<C> {
.subcommand("add", from_fn_async(cli_add_signer).no_display())
}
impl Model<BTreeMap<RequestGuid, SignerInfo>> {
pub fn get_signer(&self, key: &SignerKey) -> Result<RequestGuid, Error> {
impl Model<BTreeMap<Guid, SignerInfo>> {
pub fn get_signer(&self, key: &AnyVerifyingKey) -> Result<Guid, Error> {
self.as_entries()?
.into_iter()
.map(|(guid, s)| Ok::<_, Error>((guid, s.as_keys().de()?)))
@@ -60,7 +61,7 @@ impl Model<BTreeMap<RequestGuid, SignerInfo>> {
.ok_or_else(|| Error::new(eyre!("unknown signer"), ErrorKind::Authorization))
}
pub fn get_signer_info(&self, key: &SignerKey) -> Result<(RequestGuid, SignerInfo), Error> {
pub fn get_signer_info(&self, key: &AnyVerifyingKey) -> Result<(Guid, SignerInfo), Error> {
self.as_entries()?
.into_iter()
.map(|(guid, s)| Ok::<_, Error>((guid, s.de()?)))
@@ -88,17 +89,15 @@ impl Model<BTreeMap<RequestGuid, SignerInfo>> {
ErrorKind::InvalidRequest,
));
}
self.insert(&RequestGuid::new(), signer)
self.insert(&Guid::new(), signer)
}
}
pub async fn list_signers(
ctx: RegistryContext,
) -> Result<BTreeMap<RequestGuid, SignerInfo>, Error> {
pub async fn list_signers(ctx: RegistryContext) -> Result<BTreeMap<Guid, SignerInfo>, Error> {
ctx.db.peek().await.into_index().into_signers().de()
}
pub fn display_signers<T>(params: WithIoFormat<T>, signers: BTreeMap<RequestGuid, SignerInfo>) {
pub fn display_signers<T>(params: WithIoFormat<T>, signers: BTreeMap<Guid, SignerInfo>) {
use prettytable::*;
if let Some(format) = params.format {
@@ -137,8 +136,8 @@ pub struct CliAddSignerParams {
pub name: String,
#[arg(long = "contact", short = 'c')]
pub contact: Vec<ContactInfo>,
#[arg(long = "ed25519-key")]
pub ed25519_keys: Vec<Pem<ed25519_dalek::VerifyingKey>>,
#[arg(long = "key")]
pub keys: Vec<AnyVerifyingKey>,
pub database: Option<PathBuf>,
}
@@ -151,7 +150,7 @@ pub async fn cli_add_signer(
CliAddSignerParams {
name,
contact,
ed25519_keys,
keys,
database,
},
..
@@ -160,7 +159,7 @@ pub async fn cli_add_signer(
let signer = SignerInfo {
name,
contact,
keys: ed25519_keys.into_iter().map(SignerKey::Ed25519).collect(),
keys: keys.into_iter().collect(),
};
if let Some(database) = database {
TypedPatchDb::<RegistryDatabase>::load(PatchDb::open(database).await?)
@@ -181,8 +180,7 @@ pub async fn cli_add_signer(
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct AddAdminParams {
#[ts(type = "string")]
pub signer: RequestGuid,
pub signer: Guid,
}
pub async fn add_admin(
@@ -206,7 +204,7 @@ pub async fn add_admin(
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
pub struct CliAddAdminParams {
pub signer: RequestGuid,
pub signer: Guid,
pub database: Option<PathBuf>,
}
@@ -242,7 +240,7 @@ pub async fn cli_add_admin(
Ok(())
}
pub async fn list_admins(ctx: RegistryContext) -> Result<BTreeMap<RequestGuid, SignerInfo>, Error> {
pub async fn list_admins(ctx: RegistryContext) -> Result<BTreeMap<Guid, SignerInfo>, Error> {
let db = ctx.db.peek().await;
let admins = db.as_admins().de()?;
Ok(db

View File

@@ -1,3 +1,5 @@
use std::collections::HashMap;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
@@ -5,32 +7,48 @@ use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::signer::{AcceptSigners, FileValidator, SignatureInfo};
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey};
use crate::registry::signer::AcceptSigners;
use crate::s9pk::merkle_archive::source::http::HttpSource;
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct RegistryAsset {
pub struct RegistryAsset<Commitment> {
#[ts(type = "string")]
pub url: Url,
pub signature_info: SignatureInfo,
pub commitment: Commitment,
pub signatures: HashMap<AnyVerifyingKey, AnySignature>,
}
impl AsRef<RegistryAsset> for RegistryAsset {
fn as_ref(&self) -> &RegistryAsset {
self
impl<Commitment> RegistryAsset<Commitment> {
pub fn all_signers(&self) -> AcceptSigners {
AcceptSigners::All(
self.signatures
.keys()
.cloned()
.map(AcceptSigners::Signer)
.collect(),
)
}
}
impl RegistryAsset {
pub fn validate(&self, accept: AcceptSigners) -> Result<FileValidator, Error> {
self.signature_info.validate(accept)
impl<Commitment: Digestable> RegistryAsset<Commitment> {
pub fn validate(&self, context: &str, mut accept: AcceptSigners) -> Result<&Commitment, Error> {
for (signer, signature) in &self.signatures {
accept.process_signature(signer, &self.commitment, context, signature)?;
}
accept.try_accept()?;
Ok(&self.commitment)
}
}
impl<C: for<'a> Commitment<&'a HttpSource>> RegistryAsset<C> {
pub async fn download(
&self,
client: Client,
dst: &mut (impl AsyncWrite + Unpin + Send + ?Sized),
validator: &FileValidator,
) -> Result<(), Error> {
validator.download(self.url.clone(), client, dst).await
self.commitment
.copy_to(&HttpSource::new(client, self.url.clone()).await?, dst)
.await
}
}

View File

@@ -6,19 +6,23 @@ use axum::body::Body;
use axum::extract::Request;
use axum::response::Response;
use chrono::Utc;
use http_body_util::BodyExt;
use http::HeaderValue;
use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{Middleware, RpcRequest, RpcResponse};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha512};
use tokio::io::AsyncWriteExt;
use tokio::sync::Mutex;
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::signer::SignerKey;
use crate::util::serde::{Base64, Pem};
use crate::registry::signer::commitment::request::RequestCommitment;
use crate::registry::signer::commitment::Commitment;
use crate::registry::signer::sign::{
AnySignature, AnySigningKey, AnyVerifyingKey, SignatureScheme,
};
use crate::util::serde::Base64;
pub const AUTH_SIG_HEADER: &str = "X-StartOS-Registry-Auth-Sig";
@@ -34,7 +38,7 @@ pub struct Metadata {
#[derive(Clone)]
pub struct Auth {
nonce_cache: Arc<Mutex<BTreeMap<Instant, u64>>>, // for replay protection
signer: Option<Result<SignerKey, RpcError>>,
signer: Option<Result<AnyVerifyingKey, RpcError>>,
}
impl Auth {
pub fn new() -> Self {
@@ -68,41 +72,57 @@ pub struct RegistryAdminLogRecord {
pub name: String,
#[ts(type = "{ id: string | number | null; method: string; params: any }")]
pub request: RpcRequest,
pub key: SignerKey,
pub key: AnyVerifyingKey,
}
#[derive(Serialize, Deserialize)]
pub struct SignatureHeader {
pub timestamp: i64,
pub nonce: u64,
#[serde(flatten)]
pub signer: SignerKey,
pub signature: Base64<[u8; 64]>,
pub commitment: RequestCommitment,
pub signer: AnyVerifyingKey,
pub signature: AnySignature,
}
impl SignatureHeader {
pub fn sign_ed25519(
key: &ed25519_dalek::SigningKey,
body: &[u8],
context: &str,
) -> Result<Self, Error> {
pub fn to_header(&self) -> HeaderValue {
let mut url: Url = "http://localhost".parse().unwrap();
self.commitment.append_query(&mut url);
url.query_pairs_mut()
.append_pair("signer", &self.signer.to_string());
url.query_pairs_mut()
.append_pair("signature", &self.signature.to_string());
HeaderValue::from_str(url.query().unwrap_or_default()).unwrap()
}
pub fn from_header(header: &HeaderValue) -> Result<Self, Error> {
let url: Url = format!(
"http://localhost/?{}",
header.to_str().with_kind(ErrorKind::Utf8)?
)
.parse()?;
let query: BTreeMap<_, _> = url.query_pairs().collect();
Ok(Self {
commitment: RequestCommitment::from_query(&url)?,
signer: query.get("signer").or_not_found("signer")?.parse()?,
signature: query.get("signature").or_not_found("signature")?.parse()?,
})
}
pub fn sign(signer: &AnySigningKey, body: &[u8], context: &str) -> Result<Self, Error> {
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.as_secs() as i64)
.unwrap_or_else(|e| e.duration().as_secs() as i64 * -1);
let nonce = rand::random();
let signer = SignerKey::Ed25519(Pem(key.verifying_key()));
let mut hasher = Sha512::new();
hasher.update(&i64::to_be_bytes(timestamp));
hasher.update(&u64::to_be_bytes(nonce));
hasher.update(body);
let signature = Base64(
key.sign_prehashed(hasher, Some(context.as_bytes()))?
.to_bytes(),
);
Ok(Self {
let commitment = RequestCommitment {
timestamp,
nonce,
signer,
size: body.len() as u64,
blake3: Base64(*blake3::hash(body).as_bytes()),
};
let signature = signer
.scheme()
.sign_commitment(&signer, &commitment, context)?;
Ok(Self {
commitment,
signer: signer.verifying_key(),
signature,
})
}
@@ -120,43 +140,40 @@ impl Middleware<RegistryContext> for Auth {
async {
let request = request;
let SignatureHeader {
timestamp,
nonce,
commitment,
signer,
signature,
} = serde_urlencoded::from_str(
} = SignatureHeader::from_header(
request
.headers()
.get(AUTH_SIG_HEADER)
.or_not_found("missing X-StartOS-Registry-Auth-Sig")
.with_kind(ErrorKind::InvalidRequest)?
.to_str()
.with_kind(ErrorKind::Utf8)?,
)
.with_kind(ErrorKind::Deserialization)?;
.with_kind(ErrorKind::InvalidRequest)?,
)?;
signer.scheme().verify_commitment(
&signer,
&commitment,
&ctx.hostname,
&signature,
)?;
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.as_secs() as i64)
.unwrap_or_else(|e| e.duration().as_secs() as i64 * -1);
if (now - timestamp).abs() > 30 {
if (now - commitment.timestamp).abs() > 30 {
return Err(Error::new(
eyre!("timestamp not within 30s of now"),
ErrorKind::InvalidSignature,
));
}
self.handle_nonce(nonce).await?;
let body = std::mem::replace(request.body_mut(), Body::empty())
.collect()
.await
.with_kind(ErrorKind::Network)?
.to_bytes();
let mut verifier = signer.verifier();
verifier.update(&i64::to_be_bytes(timestamp));
verifier.update(&u64::to_be_bytes(nonce));
verifier.update(&body);
self.handle_nonce(commitment.nonce).await?;
let mut body = Vec::with_capacity(commitment.size as usize);
commitment.copy_to(request, &mut body).await?;
*request.body_mut() = Body::from(body);
verifier.verify(&*signature, &ctx.hostname)?;
Ok(signer)
}
.await

View File

@@ -6,6 +6,7 @@ use std::sync::Arc;
use clap::Parser;
use imbl_value::InternedString;
use patch_db::PatchDb;
use reqwest::{Client, Proxy};
use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{CallRemote, Context, Empty};
use serde::{Deserialize, Serialize};
@@ -17,9 +18,10 @@ use crate::context::config::{ContextConfig, CONFIG_PATH};
use crate::context::{CliContext, RpcContext};
use crate::prelude::*;
use crate::registry::auth::{SignatureHeader, AUTH_SIG_HEADER};
use crate::registry::device_info::{DeviceInfo, DEVICE_INFO_HEADER};
use crate::registry::signer::sign::AnySigningKey;
use crate::registry::RegistryDatabase;
use crate::rpc_continuations::RpcContinuations;
use crate::version::VersionT;
#[derive(Debug, Clone, Default, Deserialize, Serialize, Parser)]
#[serde(rename_all = "kebab-case")]
@@ -31,6 +33,8 @@ pub struct RegistryConfig {
pub listen: Option<SocketAddr>,
#[arg(short = 'h', long = "hostname")]
pub hostname: InternedString,
#[arg(short = 'p', long = "proxy")]
pub tor_proxy: Option<Url>,
#[arg(short = 'd', long = "datadir")]
pub datadir: Option<PathBuf>,
}
@@ -58,6 +62,7 @@ pub struct RegistryContextSeed {
pub db: TypedPatchDb<RegistryDatabase>,
pub datadir: PathBuf,
pub rpc_continuations: RpcContinuations,
pub client: Client,
pub shutdown: Sender<()>,
}
@@ -81,6 +86,11 @@ impl RegistryContext {
|| async { Ok(Default::default()) },
)
.await?;
let tor_proxy_url = config
.tor_proxy
.clone()
.map(Ok)
.unwrap_or_else(|| "socks5h://localhost:9050".parse())?;
Ok(Self(Arc::new(RegistryContextSeed {
hostname: config.hostname.clone(),
listen: config
@@ -89,6 +99,16 @@ impl RegistryContext {
db,
datadir,
rpc_continuations: RpcContinuations::new(),
client: Client::builder()
.proxy(Proxy::custom(move |url| {
if url.host_str().map_or(false, |h| h.ends_with(".onion")) {
Some(tor_proxy_url.clone())
} else {
None
}
}))
.build()
.with_kind(crate::ErrorKind::ParseUrl)?,
shutdown,
})))
}
@@ -145,12 +165,11 @@ impl CallRemote<RegistryContext> for CliContext {
.header(CONTENT_LENGTH, body.len())
.header(
AUTH_SIG_HEADER,
serde_urlencoded::to_string(&SignatureHeader::sign_ed25519(
self.developer_key()?,
SignatureHeader::sign(
&AnySigningKey::Ed25519(self.developer_key()?.clone()),
&body,
&host,
)?)
.with_kind(ErrorKind::Serialization)?,
)?.to_header(),
)
.body(body)
.send()
@@ -171,29 +190,6 @@ impl CallRemote<RegistryContext> for CliContext {
}
}
fn hardware_header(ctx: &RpcContext) -> String {
let mut url: Url = "http://localhost".parse().unwrap();
url.query_pairs_mut()
.append_pair(
"os.version",
&crate::version::Current::new().semver().to_string(),
)
.append_pair(
"os.compat",
&crate::version::Current::new().compat().to_string(),
)
.append_pair("os.arch", &*crate::PLATFORM)
.append_pair("hardware.arch", &*crate::ARCH)
.append_pair("hardware.ram", &ctx.hardware.ram.to_string());
for hw in &ctx.hardware.devices {
url.query_pairs_mut()
.append_pair(&format!("hardware.device.{}", hw.class()), hw.product());
}
url.query().unwrap_or_default().to_string()
}
impl CallRemote<RegistryContext, RegistryUrlParams> for RpcContext {
async fn call_remote(
&self,
@@ -221,7 +217,7 @@ impl CallRemote<RegistryContext, RegistryUrlParams> for RpcContext {
.header(CONTENT_TYPE, "application/json")
.header(ACCEPT, "application/json")
.header(CONTENT_LENGTH, body.len())
.header("X-StartOS-Hardware", &hardware_header(self))
.header(DEVICE_INFO_HEADER, DeviceInfo::from(self).to_header_value())
.body(body)
.send()
.await?;

View File

@@ -0,0 +1,199 @@
use std::collections::BTreeMap;
use std::convert::identity;
use std::ops::Deref;
use axum::extract::Request;
use axum::response::Response;
use emver::{Version, VersionRange};
use http::HeaderValue;
use imbl_value::InternedString;
use rpc_toolkit::{Middleware, RpcRequest, RpcResponse};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
use crate::context::RpcContext;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::util::VersionString;
use crate::version::VersionT;
pub const DEVICE_INFO_HEADER: &str = "X-StartOS-Device-Info";
/// Snapshot of a device's OS and hardware details, sent by a StartOS client
/// to the registry (serialized into the `X-StartOS-Device-Info` header via
/// [`DeviceInfo::to_header_value`]).
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct DeviceInfo {
    /// Operating-system version/compatibility/platform information.
    pub os: OsInfo,
    /// Physical hardware information (arch, RAM, attached devices).
    pub hardware: HardwareInfo,
}
impl From<&RpcContext> for DeviceInfo {
fn from(value: &RpcContext) -> Self {
Self {
os: OsInfo::from(value),
hardware: HardwareInfo::from(value),
}
}
}
impl DeviceInfo {
    /// Serializes this device info into a single HTTP header value as a
    /// URL-encoded query string, e.g.
    /// `os.version=...&os.compat=...&hardware.device.<class>=<product>&...`.
    /// Each attached hardware device becomes one `hardware.device.{class}`
    /// pair, so a class with multiple products repeats the key.
    pub fn to_header_value(&self) -> HeaderValue {
        // A throwaway URL is used purely for its query-pair percent-encoder;
        // only the resulting query string is kept.
        let mut url: Url = "http://localhost".parse().unwrap();
        url.query_pairs_mut()
            .append_pair("os.version", &self.os.version.to_string())
            .append_pair("os.compat", &self.os.compat.to_string())
            .append_pair("os.platform", &*self.os.platform)
            .append_pair("hardware.arch", &*self.hardware.arch)
            .append_pair("hardware.ram", &self.hardware.ram.to_string());
        for (class, products) in &self.hardware.devices {
            for product in products {
                url.query_pairs_mut()
                    .append_pair(&format!("hardware.device.{}", class), product);
            }
        }
        // Percent-encoded query text is valid header content, so the
        // unwrap cannot fail for encoder output.
        HeaderValue::from_str(url.query().unwrap_or_default()).unwrap()
    }

    /// Parses a header value produced by [`Self::to_header_value`] back into
    /// a `DeviceInfo`. Fails if the header is not valid UTF-8/URL text or if
    /// any required `os.*` / `hardware.*` key is missing or unparseable.
    pub fn from_header_value(header: &HeaderValue) -> Result<Self, Error> {
        // Re-wrap the query string in a dummy URL so the `url` crate can
        // decode the pairs for us.
        // NOTE(review): non-UTF-8 headers are reported as ErrorKind::ParseUrl
        // here, while the analogous auth-header parser uses ErrorKind::Utf8 —
        // confirm which kind is intended.
        let url: Url = format!(
            "http://localhost/?{}",
            header.to_str().with_kind(ErrorKind::ParseUrl)?
        )
        .parse()?;
        let query: BTreeMap<_, _> = url.query_pairs().collect();
        Ok(Self {
            os: OsInfo {
                version: query
                    .get("os.version")
                    .or_not_found("os.version")?
                    .parse()?,
                compat: query.get("os.compat").or_not_found("os.compat")?.parse()?,
                platform: query
                    .get("os.platform")
                    .or_not_found("os.platform")?
                    .deref()
                    .into(),
            },
            hardware: HardwareInfo {
                arch: query
                    .get("hardware.arch")
                    .or_not_found("hardware.arch")?
                    .parse()?,
                ram: query
                    .get("hardware.ram")
                    .or_not_found("hardware.ram")?
                    .parse()?,
                // `split_off` keeps only keys >= "hardware.device." (the map
                // is sorted), then `strip_prefix` drops any stragglers that
                // merely sort after the prefix without actually bearing it.
                // `identity` consumes `query` so the map can be moved here.
                devices: identity(query)
                    .split_off("hardware.device.")
                    .into_iter()
                    .filter_map(|(k, v)| {
                        k.strip_prefix("hardware.device.")
                            .map(|k| (k.into(), v.into_owned()))
                    })
                    // Group repeated `hardware.device.{class}` keys into
                    // class -> Vec<product>.
                    .fold(BTreeMap::new(), |mut acc, (k, v)| {
                        let mut devs = acc.remove(&k).unwrap_or_default();
                        devs.push(v);
                        acc.insert(k, devs);
                        acc
                    }),
            },
        })
    }
}
/// OS identification reported by a device: the running version, its declared
/// compatibility range, and the platform string.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct OsInfo {
    /// Currently running OS version.
    #[ts(as = "VersionString")]
    pub version: Version,
    /// Compatibility range of the running OS (taken from `Current::compat()`).
    #[ts(type = "string")]
    pub compat: VersionRange,
    /// Platform identifier, e.g. the value of `crate::PLATFORM`.
    #[ts(type = "string")]
    pub platform: InternedString,
}
impl From<&RpcContext> for OsInfo {
    /// Build `OsInfo` from the compiled-in current version/compat range and
    /// the process-wide platform string; the context itself is unused.
    fn from(_: &RpcContext) -> Self {
        Self {
            version: crate::version::Current::new().semver(),
            compat: crate::version::Current::new().compat().clone(),
            platform: InternedString::intern(&*crate::PLATFORM),
        }
    }
}
/// Hardware description reported by a device.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct HardwareInfo {
    /// CPU architecture string (e.g. the value of `crate::ARCH`).
    #[ts(type = "string")]
    pub arch: InternedString,
    /// Installed RAM — presumably in bytes; confirm against `ctx.hardware.ram`.
    #[ts(type = "number")]
    pub ram: u64,
    /// Detected devices, grouped as device class -> list of product names.
    #[ts(as = "BTreeMap::<String, Vec<String>>")]
    pub devices: BTreeMap<InternedString, Vec<String>>,
}
impl From<&RpcContext> for HardwareInfo {
fn from(value: &RpcContext) -> Self {
Self {
arch: InternedString::intern(&**crate::ARCH),
ram: value.hardware.ram,
devices: value
.hardware
.devices
.iter()
.fold(BTreeMap::new(), |mut acc, dev| {
let mut devs = acc.remove(dev.class()).unwrap_or_default();
devs.push(dev.product().to_owned());
acc.insert(dev.class().into(), devs);
acc
}),
}
}
}
/// Per-handler metadata for [`DeviceInfoMiddleware`]: a handler sets
/// `getDeviceInfo` to opt into having the parsed device info injected into
/// its RPC params.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    // When true, `process_rpc_request` adds `__device_info` to the params.
    #[serde(default)]
    get_device_info: bool,
}
/// Middleware that captures the `X-StartOS-Device-Info` header from incoming
/// HTTP requests so it can later be injected into RPC params for handlers
/// that opt in via [`Metadata`].
#[derive(Clone, Default)]
pub struct DeviceInfoMiddleware {
    // Raw header value captured in `process_http_request`, if present.
    device_info: Option<HeaderValue>,
}
impl DeviceInfoMiddleware {
    /// Create a middleware with no captured header (equivalent to `default()`).
    pub fn new() -> Self {
        Self::default()
    }
}
impl Middleware<RegistryContext> for DeviceInfoMiddleware {
    type Metadata = Metadata;
    /// Strip the device-info header off the raw HTTP request and stash it
    /// for the RPC phase; the header is removed so it never reaches handlers
    /// as a plain header.
    async fn process_http_request(
        &mut self,
        _: &RegistryContext,
        request: &mut Request,
    ) -> Result<(), Response> {
        self.device_info = request.headers_mut().remove(DEVICE_INFO_HEADER);
        Ok(())
    }
    /// If the target handler requested device info (via metadata) and the
    /// request carried the header, parse it and inject it as the
    /// `__device_info` RPC param. Parse failures become RPC error responses.
    async fn process_rpc_request(
        &mut self,
        _: &RegistryContext,
        metadata: Self::Metadata,
        request: &mut RpcRequest,
    ) -> Result<(), RpcResponse> {
        // Inner async block lets `?` collect into a single `Error`, which is
        // then converted into an RPC error response.
        async move {
            if metadata.get_device_info {
                if let Some(device_info) = &self.device_info {
                    request.params["__device_info"] =
                        to_value(&DeviceInfo::from_header_value(device_info)?)?;
                }
            }
            Ok::<_, Error>(())
        }
        .await
        .map_err(|e| RpcResponse::from_result(Err(e)))
    }
}

View File

@@ -3,20 +3,23 @@ use std::net::SocketAddr;
use axum::Router;
use futures::future::ready;
use models::DataUrl;
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler, Server};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::context::{CliContext};
use crate::context::CliContext;
use crate::middleware::cors::Cors;
use crate::net::static_server::{bad_request, not_found, server_error};
use crate::net::web_server::WebServer;
use crate::prelude::*;
use crate::registry::auth::Auth;
use crate::registry::context::{RegistryContext};
use crate::registry::context::RegistryContext;
use crate::registry::device_info::DeviceInfoMiddleware;
use crate::registry::os::index::OsIndex;
use crate::registry::package::index::PackageIndex;
use crate::registry::signer::SignerInfo;
use crate::rpc_continuations::RequestGuid;
use crate::rpc_continuations::Guid;
use crate::util::serde::HandlerExtSerde;
pub mod admin;
@@ -24,26 +27,29 @@ pub mod asset;
pub mod auth;
pub mod context;
pub mod db;
pub mod device_info;
pub mod os;
pub mod package;
pub mod signer;
#[derive(Debug, Default, Deserialize, Serialize, HasModel)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
pub struct RegistryDatabase {
pub admins: BTreeSet<RequestGuid>,
pub admins: BTreeSet<Guid>,
pub index: FullIndex,
}
impl RegistryDatabase {}
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct FullIndex {
// pub package: PackageIndex,
pub icon: Option<DataUrl<'static>>,
pub package: PackageIndex,
pub os: OsIndex,
#[ts(as = "BTreeMap::<String, SignerInfo>")]
pub signers: BTreeMap<RequestGuid, SignerInfo>,
pub signers: BTreeMap<Guid, SignerInfo>,
}
pub async fn get_full_index(ctx: RegistryContext) -> Result<FullIndex, Error> {
@@ -59,6 +65,7 @@ pub fn registry_api<C: Context>() -> ParentHandler<C> {
.with_call_remote::<CliContext>(),
)
.subcommand("os", os::os_api::<C>())
.subcommand("package", package::package_api::<C>())
.subcommand("admin", admin::admin_api::<C>())
.subcommand("db", db::db_api::<C>())
}
@@ -72,7 +79,8 @@ pub fn registry_server_router(ctx: RegistryContext) -> Router {
post(
Server::new(move || ready(Ok(ctx.clone())), registry_api())
.middleware(Cors::new())
.middleware(Auth::new()),
.middleware(Auth::new())
.middleware(DeviceInfoMiddleware::new()),
)
})
.route(
@@ -81,7 +89,7 @@ pub fn registry_server_router(ctx: RegistryContext) -> Router {
let ctx = ctx.clone();
move |x::Path(path): x::Path<String>,
ws: axum::extract::ws::WebSocketUpgrade| async move {
match RequestGuid::from(&path) {
match Guid::from(&path) {
None => {
tracing::debug!("No Guid Path");
bad_request()
@@ -104,7 +112,7 @@ pub fn registry_server_router(ctx: RegistryContext) -> Router {
.path()
.strip_prefix("/rest/rpc/")
.unwrap_or_default();
match RequestGuid::from(&path) {
match Guid::from(&path) {
None => {
tracing::debug!("No Guid Path");
bad_request()

View File

@@ -1,17 +1,13 @@
use std::collections::BTreeMap;
use std::collections::{BTreeMap, HashMap};
use std::panic::UnwindSafe;
use std::path::PathBuf;
use std::time::Duration;
use axum::response::Response;
use clap::Parser;
use futures::{FutureExt, TryStreamExt};
use helpers::NonDetachingJoinHandle;
use imbl_value::InternedString;
use itertools::Itertools;
use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha512};
use ts_rs::TS;
use url::Url;
@@ -22,10 +18,15 @@ use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
use crate::registry::os::SIG_CONTEXT;
use crate::registry::signer::{Blake3Ed25519Signature, Signature, SignatureInfo, SignerKey};
use crate::rpc_continuations::{RequestGuid, RpcContinuation};
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::{Apply, Version};
use crate::util::serde::Base64;
use crate::util::VersionString;
pub fn add_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
@@ -53,39 +54,37 @@ pub fn add_api<C: Context>() -> ParentHandler<C> {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct AddAssetParams {
#[ts(type = "string")]
pub url: Url,
pub signature: Signature,
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
#[ts(type = "string")]
pub platform: InternedString,
#[serde(default)]
pub upload: bool,
#[ts(type = "string")]
pub url: Url,
#[serde(rename = "__auth_signer")]
pub signer: SignerKey,
#[ts(skip)]
pub signer: AnyVerifyingKey,
pub signature: AnySignature,
pub commitment: Blake3Commitment,
}
async fn add_asset(
ctx: RegistryContext,
AddAssetParams {
url,
signature,
version,
platform,
upload,
url,
signer,
signature,
commitment,
}: AddAssetParams,
accessor: impl FnOnce(&mut Model<OsVersionInfo>) -> &mut Model<BTreeMap<InternedString, RegistryAsset>>
accessor: impl FnOnce(
&mut Model<OsVersionInfo>,
) -> &mut Model<BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>>
+ UnwindSafe
+ Send,
) -> Result<Option<RequestGuid>, Error> {
ensure_code!(
signature.signer() == signer,
ErrorKind::InvalidSignature,
"asset signature does not match request signer"
);
) -> Result<(), Error> {
signer
.scheme()
.verify_commitment(&signer, &commitment, SIG_CONTEXT, &signature)?;
ctx.db
.mutate(|db| {
let signer_guid = db.as_index().as_signers().get_signer(&signer)?;
@@ -95,7 +94,7 @@ async fn add_asset(
.as_versions()
.as_idx(&version)
.or_not_found(&version)?
.as_signers()
.as_authorized()
.de()?
.contains(&signer_guid)
{
@@ -109,11 +108,21 @@ async fn add_asset(
.upsert(&platform, || {
Ok(RegistryAsset {
url,
signature_info: SignatureInfo::new(SIG_CONTEXT),
commitment: commitment.clone(),
signatures: HashMap::new(),
})
})?
.as_signature_info_mut()
.mutate(|s| s.add_sig(&signature))?;
.mutate(|s| {
if s.commitment != commitment {
Err(Error::new(
eyre!("commitment does not match"),
ErrorKind::InvalidSignature,
))
} else {
s.signatures.insert(signer, signature);
Ok(())
}
})?;
Ok(())
} else {
Err(Error::new(eyre!("UNAUTHORIZED"), ErrorKind::Authorization))
@@ -121,80 +130,18 @@ async fn add_asset(
})
.await?;
let guid = if upload {
let guid = RequestGuid::new();
let auth_guid = guid.clone();
let signer = signature.signer();
let hostname = ctx.hostname.clone();
ctx.rpc_continuations
.add(
guid.clone(),
RpcContinuation::rest(
Box::new(|req| {
async move {
Ok(
if async move {
let auth_sig = base64::decode(
req.headers().get("X-StartOS-Registry-Auth-Sig")?,
)
.ok()?;
signer
.verify_message(
auth_guid.as_ref().as_bytes(),
&auth_sig,
&hostname,
)
.ok()?;
Some(())
}
.await
.is_some()
{
Response::builder()
.status(200)
.body(axum::body::Body::empty())
.with_kind(ErrorKind::Network)?
} else {
Response::builder()
.status(401)
.body(axum::body::Body::empty())
.with_kind(ErrorKind::Network)?
},
)
}
.boxed()
}),
Duration::from_secs(30),
),
)
.await;
Some(guid)
} else {
None
};
Ok(guid)
Ok(())
}
pub async fn add_iso(
ctx: RegistryContext,
params: AddAssetParams,
) -> Result<Option<RequestGuid>, Error> {
pub async fn add_iso(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> {
add_asset(ctx, params, |m| m.as_iso_mut()).await
}
pub async fn add_img(
ctx: RegistryContext,
params: AddAssetParams,
) -> Result<Option<RequestGuid>, Error> {
pub async fn add_img(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> {
add_asset(ctx, params, |m| m.as_img_mut()).await
}
pub async fn add_squashfs(
ctx: RegistryContext,
params: AddAssetParams,
) -> Result<Option<RequestGuid>, Error> {
pub async fn add_squashfs(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> {
add_asset(ctx, params, |m| m.as_squashfs_mut()).await
}
@@ -205,11 +152,9 @@ pub struct CliAddAssetParams {
#[arg(short = 'p', long = "platform")]
pub platform: InternedString,
#[arg(short = 'v', long = "version")]
pub version: Version,
pub version: VersionString,
pub file: PathBuf,
pub url: Url,
#[arg(short = 'u', long = "upload")]
pub upload: bool,
}
pub async fn cli_add_asset(
@@ -223,7 +168,6 @@ pub async fn cli_add_asset(
version,
file: path,
url,
upload,
},
..
}: HandlerArgs<CliContext, CliAddAssetParams>,
@@ -240,21 +184,18 @@ pub async fn cli_add_asset(
}
};
let file = tokio::fs::File::open(&path).await?.into();
let file = MultiCursorFile::from(tokio::fs::File::open(&path).await?);
let mut progress = FullProgressTracker::new();
let progress_handle = progress.handle();
let mut sign_phase =
progress_handle.add_phase(InternedString::intern("Signing File"), Some(10));
let mut verify_phase =
progress_handle.add_phase(InternedString::intern("Verifying URL"), Some(100));
let mut index_phase = progress_handle.add_phase(
InternedString::intern("Adding File to Registry Index"),
Some(1),
);
let mut upload_phase = if upload {
Some(progress_handle.add_phase(InternedString::intern("Uploading File"), Some(100)))
} else {
None
};
let progress_task: NonDetachingJoinHandle<()> = tokio::spawn(async move {
let mut bar = PhasedProgressBar::new(&format!("Adding {} to registry...", path.display()));
@@ -270,70 +211,46 @@ pub async fn cli_add_asset(
.into();
sign_phase.start();
let blake3_sig =
Blake3Ed25519Signature::sign_file(ctx.developer_key()?, &file, SIG_CONTEXT).await?;
let size = blake3_sig.size;
let signature = Signature::Blake3Ed25519(blake3_sig);
let blake3 = file.blake3_mmap().await?;
let size = file
.size()
.await
.ok_or_else(|| Error::new(eyre!("failed to read file metadata"), ErrorKind::Filesystem))?;
let commitment = Blake3Commitment {
hash: Base64(*blake3.as_bytes()),
size,
};
let signature = Ed25519.sign_commitment(ctx.developer_key()?, &commitment, SIG_CONTEXT)?;
sign_phase.complete();
index_phase.start();
let add_res = from_value::<Option<RequestGuid>>(
ctx.call_remote::<RegistryContext>(
&parent_method
.into_iter()
.chain(method)
.chain([ext])
.join("."),
imbl_value::json!({
"platform": platform,
"version": version,
"url": &url,
"signature": signature,
"upload": upload,
}),
)
.await?,
)?;
index_phase.complete();
verify_phase.start();
let src = HttpSource::new(ctx.client.clone(), url.clone()).await?;
let mut writer = verify_phase.writer(VerifyingWriter::new(
tokio::io::sink(),
Some((blake3::Hash::from_bytes(*commitment.hash), commitment.size)),
));
src.copy_all_to(&mut writer).await?;
let (verifier, mut verify_phase) = writer.into_inner();
verifier.verify().await?;
verify_phase.complete();
if let Some(guid) = add_res {
upload_phase.as_mut().map(|p| p.start());
upload_phase.as_mut().map(|p| p.set_total(size));
let reg_url = ctx.registry_url.as_ref().or_not_found("--registry")?;
ctx.client
.post(url)
.header("X-StartOS-Registry-Token", guid.as_ref())
.header(
"X-StartOS-Registry-Auth-Sig",
base64::encode(
ctx.developer_key()?
.sign_prehashed(
Sha512::new_with_prefix(guid.as_ref().as_bytes()),
Some(
reg_url
.host()
.or_not_found("registry hostname")?
.to_string()
.as_bytes(),
),
)?
.to_bytes(),
),
)
.body(reqwest::Body::wrap_stream(
tokio_util::io::ReaderStream::new(file.fetch(0, size).await?).inspect_ok(
move |b| {
upload_phase
.as_mut()
.map(|p| *p += b.len() as u64)
.apply(|_| ())
},
),
))
.send()
.await?;
// upload_phase.as_mut().map(|p| p.complete());
}
index_phase.start();
ctx.call_remote::<RegistryContext>(
&parent_method
.into_iter()
.chain(method)
.chain([ext])
.join("."),
imbl_value::json!({
"platform": platform,
"version": version,
"url": &url,
"signature": signature,
"commitment": commitment,
}),
)
.await?;
index_phase.complete();
progress_handle.complete();

View File

@@ -16,8 +16,11 @@ use crate::progress::{FullProgressTracker, PhasedProgressBar};
use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
use crate::registry::os::SIG_CONTEXT;
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::registry::signer::commitment::Commitment;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::util::Version;
use crate::util::VersionString;
pub fn get_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
@@ -33,8 +36,7 @@ pub fn get_api<C: Context>() -> ParentHandler<C> {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GetOsAssetParams {
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
#[ts(type = "string")]
pub platform: InternedString,
}
@@ -42,10 +44,12 @@ pub struct GetOsAssetParams {
async fn get_os_asset(
ctx: RegistryContext,
GetOsAssetParams { version, platform }: GetOsAssetParams,
accessor: impl FnOnce(&Model<OsVersionInfo>) -> &Model<BTreeMap<InternedString, RegistryAsset>>
accessor: impl FnOnce(
&Model<OsVersionInfo>,
) -> &Model<BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>>
+ UnwindSafe
+ Send,
) -> Result<RegistryAsset, Error> {
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
accessor(
ctx.db
.peek()
@@ -64,21 +68,21 @@ async fn get_os_asset(
pub async fn get_iso(
ctx: RegistryContext,
params: GetOsAssetParams,
) -> Result<RegistryAsset, Error> {
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
get_os_asset(ctx, params, |info| info.as_iso()).await
}
pub async fn get_img(
ctx: RegistryContext,
params: GetOsAssetParams,
) -> Result<RegistryAsset, Error> {
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
get_os_asset(ctx, params, |info| info.as_img()).await
}
pub async fn get_squashfs(
ctx: RegistryContext,
params: GetOsAssetParams,
) -> Result<RegistryAsset, Error> {
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
get_os_asset(ctx, params, |info| info.as_squashfs()).await
}
@@ -86,7 +90,7 @@ pub async fn get_squashfs(
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
pub struct CliGetOsAssetParams {
pub version: Version,
pub version: VersionString,
pub platform: InternedString,
#[arg(long = "download", short = 'd')]
pub download: Option<PathBuf>,
@@ -112,8 +116,8 @@ async fn cli_get_os_asset(
},
..
}: HandlerArgs<CliContext, CliGetOsAssetParams>,
) -> Result<RegistryAsset, Error> {
let res = from_value::<RegistryAsset>(
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
let res = from_value::<RegistryAsset<Blake3Commitment>>(
ctx.call_remote::<RegistryContext>(
&parent_method.into_iter().chain(method).join("."),
json!({
@@ -124,7 +128,7 @@ async fn cli_get_os_asset(
.await?,
)?;
let validator = res.validate(res.signature_info.all_signers())?;
res.validate(SIG_CONTEXT, res.all_signers())?;
if let Some(download) = download {
let mut file = AtomicFile::new(&download, None::<&Path>)
@@ -135,7 +139,7 @@ async fn cli_get_os_asset(
let progress_handle = progress.handle();
let mut download_phase =
progress_handle.add_phase(InternedString::intern("Downloading File"), Some(100));
download_phase.set_total(validator.size()?);
download_phase.set_total(res.commitment.size);
let reverify_phase = if reverify {
Some(progress_handle.add_phase(InternedString::intern("Reverifying File"), Some(10)))
} else {
@@ -157,7 +161,7 @@ async fn cli_get_os_asset(
download_phase.start();
let mut download_writer = download_phase.writer(&mut *file);
res.download(ctx.client.clone(), &mut download_writer, &validator)
res.download(ctx.client.clone(), &mut download_writer)
.await?;
let (_, mut download_phase) = download_writer.into_inner();
file.save().await.with_kind(ErrorKind::Filesystem)?;
@@ -165,8 +169,8 @@ async fn cli_get_os_asset(
if let Some(mut reverify_phase) = reverify_phase {
reverify_phase.start();
validator
.validate_file(&MultiCursorFile::from(
res.commitment
.check(&MultiCursorFile::from(
tokio::fs::File::open(download).await?,
))
.await?;

View File

@@ -17,25 +17,47 @@ use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
use crate::registry::os::SIG_CONTEXT;
use crate::registry::signer::{Blake3Ed25519Signature, Signature};
use crate::util::Version;
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::serde::Base64;
use crate::util::VersionString;
pub fn sign_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
.subcommand("iso", from_fn_async(sign_iso).no_cli())
.subcommand("img", from_fn_async(sign_img).no_cli())
.subcommand("squashfs", from_fn_async(sign_squashfs).no_cli())
.subcommand(
"iso",
from_fn_async(sign_iso)
.with_metadata("getSigner", Value::Bool(true))
.no_cli(),
)
.subcommand(
"img",
from_fn_async(sign_img)
.with_metadata("getSigner", Value::Bool(true))
.no_cli(),
)
.subcommand(
"squashfs",
from_fn_async(sign_squashfs)
.with_metadata("getSigner", Value::Bool(true))
.no_cli(),
)
}
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SignAssetParams {
#[ts(type = "string")]
version: Version,
version: VersionString,
#[ts(type = "string")]
platform: InternedString,
signature: Signature,
#[ts(skip)]
#[serde(rename = "__auth_signer")]
signer: AnyVerifyingKey,
signature: AnySignature,
}
async fn sign_asset(
@@ -43,22 +65,25 @@ async fn sign_asset(
SignAssetParams {
version,
platform,
signer,
signature,
}: SignAssetParams,
accessor: impl FnOnce(&mut Model<OsVersionInfo>) -> &mut Model<BTreeMap<InternedString, RegistryAsset>>
accessor: impl FnOnce(
&mut Model<OsVersionInfo>,
) -> &mut Model<BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>>
+ UnwindSafe
+ Send,
) -> Result<(), Error> {
ctx.db
.mutate(|db| {
let guid = db.as_index().as_signers().get_signer(&signature.signer())?;
let guid = db.as_index().as_signers().get_signer(&signer)?;
if !db
.as_index()
.as_os()
.as_versions()
.as_idx(&version)
.or_not_found(&version)?
.as_signers()
.as_authorized()
.de()?
.contains(&guid)
{
@@ -77,8 +102,16 @@ async fn sign_asset(
)
.as_idx_mut(&platform)
.or_not_found(&platform)?
.as_signature_info_mut()
.mutate(|s| s.add_sig(&signature))?;
.mutate(|s| {
signer.scheme().verify_commitment(
&signer,
&s.commitment,
SIG_CONTEXT,
&signature,
)?;
s.signatures.insert(signer, signature);
Ok(())
})?;
Ok(())
})
@@ -104,7 +137,7 @@ pub struct CliSignAssetParams {
#[arg(short = 'p', long = "platform")]
pub platform: InternedString,
#[arg(short = 'v', long = "version")]
pub version: Version,
pub version: VersionString,
pub file: PathBuf,
}
@@ -134,7 +167,7 @@ pub async fn cli_sign_asset(
}
};
let file = tokio::fs::File::open(&path).await?.into();
let file = MultiCursorFile::from(tokio::fs::File::open(&path).await?);
let mut progress = FullProgressTracker::new();
let progress_handle = progress.handle();
@@ -159,9 +192,16 @@ pub async fn cli_sign_asset(
.into();
sign_phase.start();
let blake3_sig =
Blake3Ed25519Signature::sign_file(ctx.developer_key()?, &file, SIG_CONTEXT).await?;
let signature = Signature::Blake3Ed25519(blake3_sig);
let blake3 = file.blake3_mmap().await?;
let size = file
.size()
.await
.ok_or_else(|| Error::new(eyre!("failed to read file metadata"), ErrorKind::Filesystem))?;
let commitment = Blake3Commitment {
hash: Base64(*blake3.as_bytes()),
size,
};
let signature = Ed25519.sign_commitment(ctx.developer_key()?, &commitment, SIG_CONTEXT)?;
sign_phase.complete();
index_phase.start();

View File

@@ -8,16 +8,16 @@ use ts_rs::TS;
use crate::prelude::*;
use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::rpc_continuations::RequestGuid;
use crate::util::Version;
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::rpc_continuations::Guid;
use crate::util::VersionString;
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct OsIndex {
#[ts(as = "BTreeMap::<String, OsVersionInfo>")]
pub versions: BTreeMap<Version, OsVersionInfo>,
pub versions: BTreeMap<VersionString, OsVersionInfo>,
}
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
@@ -29,14 +29,13 @@ pub struct OsVersionInfo {
pub release_notes: String,
#[ts(type = "string")]
pub source_version: VersionRange,
#[ts(type = "string[]")]
pub signers: BTreeSet<RequestGuid>,
#[ts(as = "BTreeMap::<String, RegistryAsset>")]
pub iso: BTreeMap<InternedString, RegistryAsset>, // platform (i.e. x86_64-nonfree) -> asset
#[ts(as = "BTreeMap::<String, RegistryAsset>")]
pub squashfs: BTreeMap<InternedString, RegistryAsset>, // platform (i.e. x86_64-nonfree) -> asset
#[ts(as = "BTreeMap::<String, RegistryAsset>")]
pub img: BTreeMap<InternedString, RegistryAsset>, // platform (i.e. raspberrypi) -> asset
pub authorized: BTreeSet<Guid>,
#[ts(as = "BTreeMap::<String, RegistryAsset::<Blake3Commitment>>")]
pub iso: BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>, // platform (i.e. x86_64-nonfree) -> asset
#[ts(as = "BTreeMap::<String, RegistryAsset::<Blake3Commitment>>")]
pub squashfs: BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>, // platform (i.e. x86_64-nonfree) -> asset
#[ts(as = "BTreeMap::<String, RegistryAsset::<Blake3Commitment>>")]
pub img: BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>, // platform (i.e. raspberrypi) -> asset
}
pub async fn get_os_index(ctx: RegistryContext) -> Result<OsIndex, Error> {

View File

@@ -11,9 +11,9 @@ use crate::context::CliContext;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
use crate::registry::signer::SignerKey;
use crate::registry::signer::sign::AnyVerifyingKey;
use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat};
use crate::util::Version;
use crate::util::VersionString;
pub mod signer;
@@ -51,8 +51,7 @@ pub fn version_api<C: Context>() -> ParentHandler<C> {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct AddVersionParams {
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
pub headline: String,
pub release_notes: String,
#[ts(type = "string")]
@@ -60,7 +59,7 @@ pub struct AddVersionParams {
#[arg(skip)]
#[ts(skip)]
#[serde(rename = "__auth_signer")]
pub signer: Option<SignerKey>,
pub signer: Option<AnyVerifyingKey>,
}
pub async fn add_version(
@@ -86,7 +85,7 @@ pub async fn add_version(
i.headline = headline;
i.release_notes = release_notes;
i.source_version = source_version;
i.signers.extend(signer);
i.authorized.extend(signer);
Ok(())
})
})
@@ -98,8 +97,7 @@ pub async fn add_version(
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct RemoveVersionParams {
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
}
pub async fn remove_version(
@@ -124,7 +122,7 @@ pub async fn remove_version(
pub struct GetVersionParams {
#[ts(type = "string | null")]
#[arg(long = "src")]
pub source: Option<Version>,
pub source: Option<VersionString>,
#[ts(type = "string | null")]
#[arg(long = "target")]
pub target: Option<VersionRange>,
@@ -133,7 +131,7 @@ pub struct GetVersionParams {
pub async fn get_version(
ctx: RegistryContext,
GetVersionParams { source, target }: GetVersionParams,
) -> Result<BTreeMap<Version, OsVersionInfo>, Error> {
) -> Result<BTreeMap<VersionString, OsVersionInfo>, Error> {
let target = target.unwrap_or(VersionRange::Any);
ctx.db
.peek()
@@ -153,7 +151,10 @@ pub async fn get_version(
.collect()
}
pub fn display_version_info<T>(params: WithIoFormat<T>, info: BTreeMap<Version, OsVersionInfo>) {
pub fn display_version_info<T>(
params: WithIoFormat<T>,
info: BTreeMap<VersionString, OsVersionInfo>,
) {
use prettytable::*;
if let Some(format) = params.format {

View File

@@ -10,9 +10,9 @@ use crate::prelude::*;
use crate::registry::admin::display_signers;
use crate::registry::context::RegistryContext;
use crate::registry::signer::SignerInfo;
use crate::rpc_continuations::RequestGuid;
use crate::rpc_continuations::Guid;
use crate::util::serde::HandlerExtSerde;
use crate::util::Version;
use crate::util::VersionString;
pub fn signer_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
@@ -44,10 +44,8 @@ pub fn signer_api<C: Context>() -> ParentHandler<C> {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct VersionSignerParams {
#[ts(type = "string")]
pub version: Version,
#[ts(type = "string")]
pub signer: RequestGuid,
pub version: VersionString,
pub signer: Guid,
}
pub async fn add_version_signer(
@@ -67,7 +65,7 @@ pub async fn add_version_signer(
.as_versions_mut()
.as_idx_mut(&version)
.or_not_found(&version)?
.as_signers_mut()
.as_authorized_mut()
.mutate(|s| Ok(s.insert(signer)))?;
Ok(())
@@ -87,7 +85,7 @@ pub async fn remove_version_signer(
.as_versions_mut()
.as_idx_mut(&version)
.or_not_found(&version)?
.as_signers_mut()
.as_authorized_mut()
.mutate(|s| Ok(s.remove(&signer)))?
{
return Err(Error::new(
@@ -106,21 +104,20 @@ pub async fn remove_version_signer(
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ListVersionSignersParams {
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
}
pub async fn list_version_signers(
ctx: RegistryContext,
ListVersionSignersParams { version }: ListVersionSignersParams,
) -> Result<BTreeMap<RequestGuid, SignerInfo>, Error> {
) -> Result<BTreeMap<Guid, SignerInfo>, Error> {
let db = ctx.db.peek().await;
db.as_index()
.as_os()
.as_versions()
.as_idx(&version)
.or_not_found(&version)?
.as_signers()
.as_authorized()
.de()?
.into_iter()
.filter_map(|guid| {

View File

@@ -0,0 +1,170 @@
use std::path::PathBuf;
use std::sync::Arc;
use clap::Parser;
use helpers::NonDetachingJoinHandle;
use imbl_value::InternedString;
use itertools::Itertools;
use rpc_toolkit::HandlerArgs;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
use crate::context::CliContext;
use crate::prelude::*;
use crate::progress::{FullProgressTracker, PhasedProgressBar};
use crate::registry::context::RegistryContext;
use crate::registry::package::index::PackageVersionInfo;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::v2::SIG_CONTEXT;
use crate::s9pk::S9pk;
use crate::util::io::TrackingIO;
/// Parameters for the registry's `package add` RPC.
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct AddPackageParams {
    /// Publicly reachable URL of the s9pk to index.
    #[ts(type = "string")]
    pub url: Url,
    /// Verifying key of the uploader — presumably injected by the auth layer
    /// via the `__auth_signer` param rather than supplied directly; confirm
    /// against the auth middleware.
    #[ts(skip)]
    #[serde(rename = "__auth_signer")]
    pub uploader: AnyVerifyingKey,
    /// Merkle-archive commitment (root hash) that `signature` covers.
    pub commitment: MerkleArchiveCommitment,
    /// Uploader's signature over `commitment`.
    pub signature: AnySignature,
}
/// Registry RPC handler: index the package version hosted at `url`.
///
/// Verifies the uploader's signature over the merkle-archive commitment,
/// fetches and validates the s9pk at `url` against that commitment, then
/// records the version in the package index — provided the uploader is an
/// admin or is on the package's authorized-signers list.
pub async fn add_package(
    ctx: RegistryContext,
    AddPackageParams {
        url,
        uploader,
        commitment,
        signature,
    }: AddPackageParams,
) -> Result<(), Error> {
    // Reject bad signatures before doing any network or database work.
    uploader
        .scheme()
        .verify_commitment(&uploader, &commitment, SIG_CONTEXT, &signature)?;
    let peek = ctx.db.peek().await;
    let uploader_guid = peek.as_index().as_signers().get_signer(&uploader)?;
    // Deserializing against `Some(&commitment)` ties the remote content to
    // the signed commitment, so `url` must actually serve the signed archive.
    let s9pk = S9pk::deserialize(
        &Arc::new(HttpSource::new(ctx.client.clone(), url.clone()).await?),
        Some(&commitment),
        false,
    )
    .await?;
    let manifest = s9pk.as_manifest();
    let mut info = PackageVersionInfo::from_s9pk(&s9pk, url).await?;
    // Record the uploader's signature, keeping any signature by the same key
    // already present in the s9pk.
    if !info.s9pk.signatures.contains_key(&uploader) {
        info.s9pk.signatures.insert(uploader.clone(), signature);
    }
    ctx.db
        .mutate(|db| {
            // Authorized if the uploader is a registry admin, or is listed as
            // authorized for this specific package.
            // NOTE(review): for a non-admin uploading a brand-new package id,
            // `or_not_found` fails before the authorized check — presumably
            // intentional (only admins introduce new packages); confirm.
            if db.as_admins().de()?.contains(&uploader_guid)
                || db
                    .as_index()
                    .as_package()
                    .as_packages()
                    .as_idx(&manifest.id)
                    .or_not_found(&manifest.id)?
                    .as_authorized()
                    .de()?
                    .contains(&uploader_guid)
            {
                let package = db
                    .as_index_mut()
                    .as_package_mut()
                    .as_packages_mut()
                    .upsert(&manifest.id, || Ok(Default::default()))?;
                package.as_versions_mut().insert(&manifest.version, &info)?;
                Ok(())
            } else {
                Err(Error::new(eyre!("UNAUTHORIZED"), ErrorKind::Authorization))
            }
        })
        .await
}
/// CLI arguments for `package add`: a local s9pk file plus the public URL
/// where that same file is hosted.
#[derive(Debug, Deserialize, Serialize, Parser)]
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
pub struct CliAddPackageParams {
    /// Path to the local s9pk to sign.
    pub file: PathBuf,
    /// Public URL the registry should fetch the package from.
    pub url: Url,
}
/// CLI-side flow for `package add`: sign the local s9pk's commitment, verify
/// the copy hosted at `url` matches it, then ask the remote registry to index
/// it — with a phased progress bar throughout.
pub async fn cli_add_package(
    HandlerArgs {
        context: ctx,
        parent_method,
        method,
        params: CliAddPackageParams { file, url },
        ..
    }: HandlerArgs<CliContext, CliAddPackageParams>,
) -> Result<(), Error> {
    let s9pk = S9pk::open(&file, None, false).await?;

    // Progress UI: three phases, rendered by a background task until the
    // overall tracker reports completion.
    let mut progress = FullProgressTracker::new();
    let progress_handle = progress.handle();
    let mut sign_phase = progress_handle.add_phase(InternedString::intern("Signing File"), Some(1));
    let mut verify_phase =
        progress_handle.add_phase(InternedString::intern("Verifying URL"), Some(100));
    let mut index_phase = progress_handle.add_phase(
        InternedString::intern("Adding File to Registry Index"),
        Some(1),
    );
    let progress_task: NonDetachingJoinHandle<()> = tokio::spawn(async move {
        let mut bar = PhasedProgressBar::new(&format!("Adding {} to registry...", file.display()));
        loop {
            let snap = progress.snapshot();
            bar.update(&snap);
            if snap.overall.is_complete() {
                break;
            }
            progress.changed().await
        }
    })
    .into();

    // Sign: commit to the archive's merkle root with the developer key.
    sign_phase.start();
    let commitment = s9pk.as_archive().commitment().await?;
    let signature = Ed25519.sign_commitment(ctx.developer_key()?, &commitment, SIG_CONTEXT)?;
    sign_phase.complete();

    // Verify: re-download the s9pk from `url` against the commitment and
    // serialize it to a sink, proving the hosted copy is complete and matches.
    verify_phase.start();
    let mut src = S9pk::deserialize(
        &Arc::new(HttpSource::new(ctx.client.clone(), url.clone()).await?),
        Some(&commitment),
        false,
    )
    .await?;
    src.serialize(&mut TrackingIO::new(0, tokio::io::sink()), true)
        .await?;
    verify_phase.complete();

    // Index: call the registry's matching `add` RPC with url + signature +
    // commitment.
    index_phase.start();
    ctx.call_remote::<RegistryContext>(
        &parent_method.into_iter().chain(method).join("."),
        imbl_value::json!({
            "url": &url,
            "signature": signature,
            "commitment": commitment,
        }),
    )
    .await?;
    index_phase.complete();
    progress_handle.complete();

    progress_task.await.with_kind(ErrorKind::Unknown)?;
    Ok(())
}

View File

@@ -0,0 +1,387 @@
use std::collections::{BTreeMap, BTreeSet};
use clap::{Parser, ValueEnum};
use emver::{Version, VersionRange};
use imbl_value::InternedString;
use itertools::Itertools;
use models::PackageId;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::device_info::DeviceInfo;
use crate::registry::package::index::{PackageIndex, PackageVersionInfo};
use crate::util::serde::{display_serializable, WithIoFormat};
use crate::util::VersionString;
/// How much detail to include for non-"best" package versions in `get`
/// responses. The manual `Default` impl is replaced by the derive with the
/// `#[default]` variant attribute — identical behavior, less code.
#[derive(
    Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS,
    ValueEnum,
)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum PackageDetailLevel {
    /// Release notes only.
    #[default]
    Short,
    /// Full `PackageVersionInfo` per version.
    Full,
}
/// Abbreviated per-version info used for `other_versions` in short detail
/// mode: only the release notes are surfaced.
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct PackageInfoShort {
    pub release_notes: String,
}
/// Query parameters for `package get`.
#[derive(Debug, Deserialize, Serialize, TS, Parser)]
#[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")]
#[ts(export)]
pub struct GetPackageParams {
    /// Restrict to one package; `None` queries all packages.
    pub id: Option<PackageId>,
    /// Only versions satisfying this range are considered.
    #[ts(type = "string | null")]
    pub version: Option<VersionRange>,
    /// Only versions whose `source_version` upgrade range this version
    /// satisfies (i.e. valid upgrades from this installed version).
    #[ts(type = "string | null")]
    pub source_version: Option<Version>,
    // Hidden from both the CLI and the TS bindings; presumably injected
    // server-side (hence the "__device_info" rename) — confirm against the
    // registry middleware.
    #[ts(skip)]
    #[arg(skip)]
    #[serde(rename = "__device_info")]
    pub device_info: Option<DeviceInfo>,
    /// If set, also report non-best versions at this level of detail.
    pub other_versions: Option<PackageDetailLevel>,
}
/// Response for a single package: its categories, the "best" matching
/// version(s), and (optionally) short info about other known versions.
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GetPackageResponse {
    #[ts(type = "string[]")]
    pub categories: BTreeSet<InternedString>,
    pub best: BTreeMap<VersionString, PackageVersionInfo>,
    /// Present only when `other_versions = short` was requested.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub other_versions: Option<BTreeMap<VersionString, PackageInfoShort>>,
}
impl GetPackageResponse {
    /// Render one prettytable per "best" version; when other versions are
    /// present, each table also lists the strictly older ones with their
    /// release notes.
    pub fn tables(&self) -> Vec<prettytable::Table> {
        use prettytable::*;
        let mut tables = Vec::with_capacity(self.best.len());
        for (version, info) in &self.best {
            let mut table = info.table(version);
            // Collect every known version strictly older than this one.
            let older: BTreeMap<_, _> = self
                .other_versions
                .as_ref()
                .into_iter()
                .flatten()
                .filter(|(v, _)| ***v < **version)
                .collect();
            if !older.is_empty() {
                table.add_row(row![bc => "OLDER VERSIONS"]);
                table.add_row(row![bc => "VERSION", "RELEASE NOTES"]);
                for (old_version, short) in older {
                    table.add_row(row![
                        AsRef::<str>::as_ref(old_version),
                        &short.release_notes
                    ]);
                }
            }
            tables.push(table);
        }
        tables
    }
}
/// Like [`GetPackageResponse`], but `other_versions` carries the complete
/// `PackageVersionInfo` for every non-best version (always present).
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GetPackageResponseFull {
    #[ts(type = "string[]")]
    pub categories: BTreeSet<InternedString>,
    pub best: BTreeMap<VersionString, PackageVersionInfo>,
    pub other_versions: BTreeMap<VersionString, PackageVersionInfo>,
}
impl GetPackageResponseFull {
    /// One table per version, over the union of `best` and `other_versions`.
    /// Collecting into a `BTreeMap` first keeps the output in ascending
    /// version order, exactly as the original did.
    pub fn tables(&self) -> Vec<prettytable::Table> {
        self.best
            .iter()
            .chain(self.other_versions.iter())
            .collect::<BTreeMap<_, _>>()
            .into_iter()
            .map(|(version, info)| info.table(version))
            .collect()
    }
}
/// Multi-package `get` response keyed by package id (short detail).
pub type GetPackagesResponse = BTreeMap<PackageId, GetPackageResponse>;
/// Multi-package `get` response keyed by package id (full detail).
pub type GetPackagesResponseFull = BTreeMap<PackageId, GetPackageResponseFull>;
/// Collect every (package id, version, version-info model) triple in the
/// index matching the request filters.
///
/// Per-version filters (all optional; absent means "matches"):
///   - `version`: candidate must satisfy the requested range
///   - `source_version`: the requester's version must satisfy the candidate's
///     `source_version` range (a missing range is treated as "any")
///   - `device_info`: candidate must be installable on the device
fn get_matching_models<'a>(
    db: &'a Model<PackageIndex>,
    GetPackageParams {
        id,
        version,
        source_version,
        device_info,
        ..
    }: &GetPackageParams,
) -> Result<Vec<(PackageId, Version, &'a Model<PackageVersionInfo>)>, Error> {
    // Narrow to a single package entry if an id was given; an unknown id
    // yields an empty result rather than an error.
    if let Some(id) = id {
        if let Some(pkg) = db.as_packages().as_idx(id) {
            vec![(id.clone(), pkg)]
        } else {
            vec![]
        }
    } else {
        db.as_packages().as_entries()?
    }
    .iter()
    .map(|(k, v)| {
        // For each package, map every version to Some(triple) if it passes
        // all filters, None otherwise.
        Ok(v.as_versions()
            .as_entries()?
            .into_iter()
            .map(|(v, info)| {
                Ok::<_, Error>(
                    if version
                        .as_ref()
                        .map_or(true, |version| v.satisfies(version))
                        && source_version.as_ref().map_or(Ok(true), |source_version| {
                            Ok::<_, Error>(
                                source_version.satisfies(
                                    &info
                                        .as_source_version()
                                        .de()?
                                        .unwrap_or(VersionRange::any()),
                                ),
                            )
                        })?
                        && device_info
                            .as_ref()
                            .map_or(Ok(true), |device_info| info.works_for_device(device_info))?
                    {
                        Some((k.clone(), Version::from(v), info))
                    } else {
                        None
                    },
                )
            })
            .flatten_ok())
    })
    // Flatten nested per-package iterators; flatten_ok also drops the `None`s
    // (Option is an iterator), leaving only matching triples or errors.
    .flatten_ok()
    .map(|res| res.and_then(|a| a))
    .collect()
}
/// Registry RPC endpoint: query package metadata.
///
/// For every (package, version) matching the request filters, the newest
/// matching version(s) are reported as `best`; when `other_versions` is
/// requested, the older matches are reported separately at the chosen detail
/// level. The response shape varies with `params.id` (single package vs a map
/// keyed by id) and with `params.other_versions`, so this returns an untyped
/// `Value`.
pub async fn get_package(ctx: RegistryContext, params: GetPackageParams) -> Result<Value, Error> {
    use patch_db::ModelExt;
    let peek = ctx.db.peek().await;
    let mut best: BTreeMap<PackageId, BTreeMap<VersionString, &Model<PackageVersionInfo>>> =
        Default::default();
    let mut other: BTreeMap<PackageId, BTreeMap<VersionString, &Model<PackageVersionInfo>>> =
        Default::default();
    for (id, version, info) in get_matching_models(&peek.as_index().as_package(), &params)? {
        let mut package_best = best.remove(&id).unwrap_or_default();
        let mut package_other = other.remove(&id).unwrap_or_default();
        // Demote any current "best" versions that are older than this match.
        for worse_version in package_best
            .keys()
            .filter(|k| ***k < version)
            .cloned()
            .collect_vec()
        {
            if let Some(info) = package_best.remove(&worse_version) {
                package_other.insert(worse_version, info);
            }
        }
        // Keep this match as "best" only if nothing newer is already there.
        if package_best.keys().all(|k| !(**k > version)) {
            package_best.insert(version.into(), info);
        }
        best.insert(id.clone(), package_best);
        if params.other_versions.is_some() {
            other.insert(id.clone(), package_other);
        }
    }
    if let Some(id) = params.id {
        // Single-package response: one GetPackageResponse(-Full).
        let categories = peek
            .as_index()
            .as_package()
            .as_packages()
            .as_idx(&id)
            .map(|p| p.as_categories().de())
            .transpose()?
            .unwrap_or_default();
        let best = best
            .remove(&id)
            .unwrap_or_default()
            .into_iter()
            .map(|(k, v)| v.de().map(|v| (k, v)))
            .try_collect()?;
        let other = other.remove(&id).unwrap_or_default();
        match params.other_versions {
            None => to_value(&GetPackageResponse {
                categories,
                best,
                other_versions: None,
            }),
            // Short form: project each other-version model down to
            // PackageInfoShort via its raw value.
            Some(PackageDetailLevel::Short) => to_value(&GetPackageResponse {
                categories,
                best,
                other_versions: Some(
                    other
                        .into_iter()
                        .map(|(k, v)| from_value(v.as_value().clone()).map(|v| (k, v)))
                        .try_collect()?,
                ),
            }),
            Some(PackageDetailLevel::Full) => to_value(&GetPackageResponseFull {
                categories,
                best,
                other_versions: other
                    .into_iter()
                    .map(|(k, v)| v.de().map(|v| (k, v)))
                    .try_collect()?,
            }),
        }
    } else {
        // Multi-package response: a map keyed by package id, same three
        // detail variants as above.
        match params.other_versions {
            None => to_value(
                &best
                    .into_iter()
                    .map(|(id, best)| {
                        let categories = peek
                            .as_index()
                            .as_package()
                            .as_packages()
                            .as_idx(&id)
                            .map(|p| p.as_categories().de())
                            .transpose()?
                            .unwrap_or_default();
                        Ok::<_, Error>((
                            id,
                            GetPackageResponse {
                                categories,
                                best: best
                                    .into_iter()
                                    .map(|(k, v)| v.de().map(|v| (k, v)))
                                    .try_collect()?,
                                other_versions: None,
                            },
                        ))
                    })
                    .try_collect::<_, GetPackagesResponse, _>()?,
            ),
            Some(PackageDetailLevel::Short) => to_value(
                &best
                    .into_iter()
                    .map(|(id, best)| {
                        let categories = peek
                            .as_index()
                            .as_package()
                            .as_packages()
                            .as_idx(&id)
                            .map(|p| p.as_categories().de())
                            .transpose()?
                            .unwrap_or_default();
                        let other = other.remove(&id).unwrap_or_default();
                        Ok::<_, Error>((
                            id,
                            GetPackageResponse {
                                categories,
                                best: best
                                    .into_iter()
                                    .map(|(k, v)| v.de().map(|v| (k, v)))
                                    .try_collect()?,
                                other_versions: Some(
                                    other
                                        .into_iter()
                                        .map(|(k, v)| {
                                            from_value(v.as_value().clone()).map(|v| (k, v))
                                        })
                                        .try_collect()?,
                                ),
                            },
                        ))
                    })
                    .try_collect::<_, GetPackagesResponse, _>()?,
            ),
            Some(PackageDetailLevel::Full) => to_value(
                &best
                    .into_iter()
                    .map(|(id, best)| {
                        let categories = peek
                            .as_index()
                            .as_package()
                            .as_packages()
                            .as_idx(&id)
                            .map(|p| p.as_categories().de())
                            .transpose()?
                            .unwrap_or_default();
                        let other = other.remove(&id).unwrap_or_default();
                        Ok::<_, Error>((
                            id,
                            GetPackageResponseFull {
                                categories,
                                best: best
                                    .into_iter()
                                    .map(|(k, v)| v.de().map(|v| (k, v)))
                                    .try_collect()?,
                                other_versions: other
                                    .into_iter()
                                    .map(|(k, v)| v.de().map(|v| (k, v)))
                                    .try_collect()?,
                            },
                        ))
                    })
                    .try_collect::<_, GetPackagesResponseFull, _>()?,
            ),
        }
    }
}
/// CLI display hook for `package get`: pretty-print the RPC result.
///
/// If an explicit output format was requested, defer to the generic
/// serializer. Otherwise deserialize into the response type implied by the
/// request (`id` present → single package; `other-versions=full` → full
/// types) and print each version table.
///
/// Fixes: replaces `if let Some(_) = …` with `is_some()` and collapses the
/// four duplicated print loops into one.
pub fn display_package_info(
    params: WithIoFormat<GetPackageParams>,
    info: Value,
) -> Result<(), Error> {
    if let Some(format) = params.format {
        display_serializable(format, info);
        return Ok(());
    }
    let full = params.rest.other_versions == Some(PackageDetailLevel::Full);
    // Select the response type matching the request, then flatten to tables.
    let tables = if params.rest.id.is_some() {
        if full {
            from_value::<GetPackageResponseFull>(info)?.tables()
        } else {
            from_value::<GetPackageResponse>(info)?.tables()
        }
    } else if full {
        from_value::<GetPackagesResponseFull>(info)?
            .into_values()
            .flat_map(|package| package.tables())
            .collect()
    } else {
        from_value::<GetPackagesResponse>(info)?
            .into_values()
            .flat_map(|package| package.tables())
            .collect()
    };
    for table in tables {
        table.print_tty(false)?;
        println!();
    }
    Ok(())
}

View File

@@ -0,0 +1,163 @@
use std::collections::{BTreeMap, BTreeSet};
use emver::{Version, VersionRange};
use imbl_value::InternedString;
use models::{DataUrl, PackageId, VersionString};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::device_info::DeviceInfo;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey};
use crate::rpc_continuations::Guid;
use crate::s9pk::git_hash::GitHash;
use crate::s9pk::manifest::{Description, HardwareRequirements};
use crate::s9pk::merkle_archive::source::FileSource;
use crate::s9pk::S9pk;
/// Top-level registry package index: category definitions plus every package.
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct PackageIndex {
    #[ts(as = "BTreeMap::<String, Category>")]
    pub categories: BTreeMap<InternedString, Category>,
    pub packages: BTreeMap<PackageId, PackageInfo>,
}
/// Per-package registry record.
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct PackageInfo {
    /// Signer GUIDs allowed to publish new versions of this package.
    pub authorized: BTreeSet<Guid>,
    pub versions: BTreeMap<VersionString, PackageVersionInfo>,
    /// Category ids this package is listed under.
    #[ts(type = "string[]")]
    pub categories: BTreeSet<InternedString>,
}
/// Human-readable metadata for a package category.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct Category {
    pub name: String,
    pub description: Description,
}
/// Index metadata for one published version of a package, extracted from its
/// s9pk manifest plus the registry asset record (URL, commitment, signatures).
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct PackageVersionInfo {
    pub title: String,
    pub icon: DataUrl<'static>,
    pub description: Description,
    pub release_notes: String,
    #[ts(type = "string")]
    pub git_hash: GitHash,
    #[ts(type = "string")]
    pub license: InternedString,
    #[ts(type = "string")]
    pub wrapper_repo: Url,
    #[ts(type = "string")]
    pub upstream_repo: Url,
    #[ts(type = "string")]
    pub support_site: Url,
    #[ts(type = "string")]
    pub marketing_site: Url,
    /// OS version the package was built against; matched against the
    /// device's compat range in `works_for_device`.
    pub os_version: VersionString,
    pub hardware_requirements: HardwareRequirements,
    /// Range of installed versions this version can upgrade from; `None`
    /// means any (see `get_matching_models`).
    #[ts(type = "string | null")]
    pub source_version: Option<VersionRange>,
    /// Where and how to fetch the s9pk, with its commitment and signatures.
    pub s9pk: RegistryAsset<MerkleArchiveCommitment>,
}
impl PackageVersionInfo {
    /// Build index metadata for a package version from its s9pk, recording
    /// the archive commitment and the archive's own publisher signature.
    pub async fn from_s9pk<S: FileSource + Clone>(s9pk: &S9pk<S>, url: Url) -> Result<Self, Error> {
        let manifest = s9pk.as_manifest();
        Ok(Self {
            title: manifest.title.clone(),
            icon: s9pk.icon_data_url().await?,
            description: manifest.description.clone(),
            release_notes: manifest.release_notes.clone(),
            // Packages without a recorded git hash are rejected here.
            git_hash: manifest.git_hash.clone().or_not_found("git hash")?,
            license: manifest.license.clone(),
            wrapper_repo: manifest.wrapper_repo.clone(),
            upstream_repo: manifest.upstream_repo.clone(),
            support_site: manifest.support_site.clone(),
            marketing_site: manifest.marketing_site.clone(),
            os_version: manifest.os_version.clone(),
            hardware_requirements: manifest.hardware_requirements.clone(),
            source_version: None, // TODO
            s9pk: RegistryAsset {
                url,
                commitment: s9pk.as_archive().commitment().await?,
                // Seed the signature set with the key that signed the archive
                // itself; uploader signatures are merged in by `add_package`.
                signatures: [(
                    AnyVerifyingKey::Ed25519(s9pk.as_archive().signer()),
                    AnySignature::Ed25519(s9pk.as_archive().signature().await?),
                )]
                .into_iter()
                .collect(),
            },
        })
    }
    /// Render this version's metadata as a prettytable for CLI output.
    pub fn table(&self, version: &VersionString) -> prettytable::Table {
        use prettytable::*;
        let mut table = Table::new();
        table.add_row(row![bc => &self.title]);
        table.add_row(row![br -> "VERSION", AsRef::<str>::as_ref(version)]);
        table.add_row(row![br -> "RELEASE NOTES", &self.release_notes]);
        table.add_row(row![br -> "ABOUT", &self.description.short]);
        table.add_row(row![br -> "DESCRIPTION", &self.description.long]);
        table.add_row(row![br -> "GIT HASH", AsRef::<str>::as_ref(&self.git_hash)]);
        table.add_row(row![br -> "LICENSE", &self.license]);
        table.add_row(row![br -> "PACKAGE REPO", &self.wrapper_repo.to_string()]);
        table.add_row(row![br -> "SERVICE REPO", &self.upstream_repo.to_string()]);
        table.add_row(row![br -> "WEBSITE", &self.marketing_site.to_string()]);
        table.add_row(row![br -> "SUPPORT", &self.support_site.to_string()]);
        table
    }
}
impl Model<PackageVersionInfo> {
    /// Whether this package version can run on the given device: the
    /// device's OS must satisfy the package's `os_version` compat range, and
    /// every declared hardware requirement (arch set, minimum RAM,
    /// device-class regexes) must be met.
    ///
    /// Fix: the device-class lookup previously used `.unwrap_or(&Vec::new())`,
    /// which eagerly constructs a temporary `Vec` on every iteration (clippy
    /// `or_fun_call`); an empty-slice fallback expresses the same thing.
    pub fn works_for_device(&self, device_info: &DeviceInfo) -> Result<bool, Error> {
        if !self.as_os_version().de()?.satisfies(&device_info.os.compat) {
            return Ok(false);
        }
        let hw = self.as_hardware_requirements().de()?;
        if let Some(arch) = hw.arch {
            if !arch.contains(&device_info.hardware.arch) {
                return Ok(false);
            }
        }
        if let Some(ram) = hw.ram {
            if device_info.hardware.ram < ram {
                return Ok(false);
            }
        }
        for (class, regex) in hw.device {
            // A device class absent from the device is treated as an empty
            // product list, so any regex required for it fails the check.
            let products = device_info
                .hardware
                .devices
                .get(&*class)
                .map(|v| v.as_slice())
                .unwrap_or(&[]);
            if !products
                .iter()
                .any(|product| regex.as_ref().is_match(product))
            {
                return Ok(false);
            }
        }
        Ok(true)
    }
}
/// Registry RPC endpoint: return the entire package index.
pub async fn get_package_index(ctx: RegistryContext) -> Result<PackageIndex, Error> {
    let db = ctx.db.peek().await;
    db.into_index().into_package().de()
}

View File

@@ -0,0 +1,29 @@
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use crate::context::CliContext;
use crate::prelude::*;
use crate::util::serde::HandlerExtSerde;
pub mod add;
pub mod get;
pub mod index;
/// RPC routes for the registry `package` namespace: `index`, `add`, `get`.
pub fn package_api<C: Context>() -> ParentHandler<C> {
    ParentHandler::new()
        .subcommand(
            "index",
            from_fn_async(index::get_package_index)
                .with_display_serializable()
                .with_call_remote::<CliContext>(),
        )
        // "add" is registered twice: the first handler is the server-side
        // implementation (hidden from the CLI), the second the CLI front-end
        // that signs locally and then calls the remote method.
        // NOTE(review): confirm rpc_toolkit merges same-name subcommands into
        // one remote+CLI route rather than shadowing one of them.
        .subcommand("add", from_fn_async(add::add_package).no_cli())
        .subcommand("add", from_fn_async(add::cli_add_package).no_display())
        .subcommand(
            "get",
            from_fn_async(get::get_package)
                .with_display_serializable()
                // Custom pretty-printer; shape-aware (see display_package_info).
                .with_custom_display_fn(|handle, result| {
                    get::display_package_info(handle.params, result)
                }),
        )
}

View File

@@ -1,477 +0,0 @@
use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::str::FromStr;
use clap::builder::ValueParserFactory;
use imbl_value::InternedString;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha512};
use tokio::io::AsyncWrite;
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{ArchiveSource, FileSource};
use crate::util::clap::FromStrParser;
use crate::util::serde::{Base64, Pem};
/// A registered signer: display name, contact channels, and public keys.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct SignerInfo {
    pub name: String,
    pub contact: Vec<ContactInfo>,
    /// All keys this signer may sign with.
    pub keys: HashSet<SignerKey>,
}
/// A signer's public key, tagged by algorithm (serialized as
/// `{"alg": …, "pubkey": …}`).
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, Hash, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
#[serde(tag = "alg", content = "pubkey")]
pub enum SignerKey {
    Ed25519(Pem<ed25519_dalek::VerifyingKey>),
}
impl SignerKey {
pub fn verifier(&self) -> Verifier {
match self {
Self::Ed25519(k) => Verifier::Ed25519(*k, Sha512::new()),
}
}
pub fn verify_message(
&self,
message: &[u8],
signature: &[u8],
context: &str,
) -> Result<(), Error> {
let mut v = self.verifier();
v.update(message);
v.verify(signature, context)
}
}
impl std::fmt::Display for SignerKey {
    /// Display as the inner key's (PEM) representation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self::Ed25519(key) = self;
        write!(f, "{}", key)
    }
}
/// Streaming signature verifier: accumulates the message hash incrementally,
/// then checks an ed25519 signature over the prehash.
pub enum Verifier {
    Ed25519(Pem<ed25519_dalek::VerifyingKey>, Sha512),
}
impl Verifier {
    /// Feed more message bytes into the running hash.
    pub fn update(&mut self, data: &[u8]) {
        let Self::Ed25519(_, hasher) = self;
        hasher.update(data);
    }
    /// Consume the verifier and check `signature` over the accumulated
    /// prehash, domain-separated by `context`.
    pub fn verify(self, signature: &[u8], context: &str) -> Result<(), Error> {
        let Self::Ed25519(key, hasher) = self;
        key.verify_prehashed_strict(
            hasher,
            Some(context.as_bytes()),
            &ed25519_dalek::Signature::from_slice(signature)?,
        )?;
        Ok(())
    }
}
/// A way to contact a signer; displayed/parsed as a URI-like string
/// (see the `Display`/`FromStr` impls below).
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
// TODO: better types
pub enum ContactInfo {
    Email(String),
    Matrix(String),
    Website(#[ts(type = "string")] Url),
}
impl std::fmt::Display for ContactInfo {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Email(e) => write!(f, "mailto:{e}"),
Self::Matrix(m) => write!(f, "https://matrix.to/#/{m}"),
Self::Website(w) => write!(f, "{w}"),
}
}
}
impl FromStr for ContactInfo {
    type Err = Error;
    /// Inverse of `Display`: recognize `mailto:` and matrix.to prefixes,
    /// otherwise treat the string as a website URL.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Some(email) = s.strip_prefix("mailto:") {
            Ok(Self::Email(email.to_owned()))
        } else if let Some(handle) = s.strip_prefix("https://matrix.to/#/") {
            Ok(Self::Matrix(handle.to_owned()))
        } else {
            Ok(Self::Website(s.parse()?))
        }
    }
}
impl ValueParserFactory for ContactInfo {
type Parser = FromStrParser<Self>;
fn value_parser() -> Self::Parser {
Self::Parser::new()
}
}
/// Signatures over a single file, grouped by (hash, signature) algorithm
/// pair and bound to a domain-separation context.
///
/// NOTE(review): `blake3_ed255i9` looks like a typo for `blake3_ed25519`,
/// but the kebab-case serde name derives from it, so renaming would break
/// serialized data — confirm before changing.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "kebab-case")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct SignatureInfo {
    /// Context all contained signatures were made under.
    #[ts(type = "string")]
    pub context: InternedString,
    pub blake3_ed255i9: Option<Blake3Ed2551SignatureInfo>,
}
impl SignatureInfo {
    /// Empty signature set bound to the given signing `context`.
    pub fn new(context: &str) -> Self {
        Self {
            context: context.into(),
            blake3_ed255i9: None,
        }
    }
    /// Build a `FileValidator` from these signatures, accepting only signers
    /// permitted by `accept`.
    pub fn validate(&self, accept: AcceptSigners) -> Result<FileValidator, Error> {
        FileValidator::from_signatures(self.signatures(), accept, &self.context)
    }
    /// An `AcceptSigners` policy requiring every signer present in this info.
    pub fn all_signers(&self) -> AcceptSigners {
        AcceptSigners::All(
            self.signatures()
                .map(|s| AcceptSigners::Signer(s.signer()))
                .collect(),
        )
        .flatten()
    }
    /// Expand the per-algorithm signature tables into individual `Signature`s
    /// (each carrying the shared hash/size commitment).
    pub fn signatures(&self) -> impl Iterator<Item = Signature> + '_ {
        self.blake3_ed255i9.iter().flat_map(|info| {
            info.signatures
                .iter()
                .map(|(k, s)| (k.clone(), *s))
                .map(|(pubkey, signature)| {
                    Signature::Blake3Ed25519(Blake3Ed25519Signature {
                        hash: info.hash,
                        size: info.size,
                        pubkey,
                        signature,
                    })
                })
        })
    }
    /// Validate `signature` against this info's context, then merge it in.
    /// Fails if it commits to a different file hash than existing signatures.
    pub fn add_sig(&mut self, signature: &Signature) -> Result<(), Error> {
        signature.validate(&self.context)?;
        match signature {
            Signature::Blake3Ed25519(s) => {
                // Accept only if there is no existing table, or the new
                // signature's hash matches the table's hash.
                if self
                    .blake3_ed255i9
                    .as_ref()
                    .map_or(true, |info| info.hash == s.hash)
                {
                    let new = if let Some(mut info) = self.blake3_ed255i9.take() {
                        info.signatures.insert(s.pubkey, s.signature);
                        info
                    } else {
                        s.info()
                    };
                    self.blake3_ed255i9 = Some(new);
                    Ok(())
                } else {
                    Err(Error::new(
                        eyre!("hash sum mismatch"),
                        ErrorKind::InvalidSignature,
                    ))
                }
            }
        }
    }
}
/// A policy tree describing which signers must have signed a file.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum AcceptSigners {
    /// Internal state only (never (de)serialized): a signature that has
    /// already satisfied this leaf of the policy.
    #[serde(skip)]
    Accepted(Signature),
    /// Satisfied by a signature from this exact key.
    Signer(SignerKey),
    /// Satisfied if any child policy is satisfied.
    Any(Vec<AcceptSigners>),
    /// Satisfied only if every child policy is satisfied.
    All(Vec<AcceptSigners>),
}
impl AcceptSigners {
    /// `Any([])` — a policy that can never be satisfied; used as a temporary
    /// placeholder while updating in place.
    const fn null() -> Self {
        Self::Any(Vec::new())
    }
    /// Collapse single-element `Any`/`All` wrappers recursively.
    pub fn flatten(self) -> Self {
        match self {
            Self::Any(mut s) | Self::All(mut s) if s.len() == 1 => s.swap_remove(0).flatten(),
            s => s,
        }
    }
    /// Whether the policy has been fully satisfied by processed signatures.
    /// An `Any` node never reports accepted directly — `process_signature`
    /// replaces a satisfied `Any` with its winning branch.
    pub fn accepted(&self) -> bool {
        match self {
            Self::Accepted(_) => true,
            Self::All(s) => s.iter().all(|s| s.accepted()),
            _ => false,
        }
    }
    /// Consume the policy, yielding each accepted signature (re-validated
    /// against `context`) or an error for any branch left unsatisfied.
    pub fn try_accept(
        self,
        context: &str,
    ) -> Box<dyn Iterator<Item = Result<Signature, Error>> + Send + Sync + '_> {
        match self {
            Self::Accepted(s) => Box::new(std::iter::once(s).map(|s| {
                s.validate(context)?;
                Ok(s)
            })),
            Self::All(s) => Box::new(s.into_iter().flat_map(|s| s.try_accept(context))),
            // Signer / Any leaves that were never accepted.
            _ => Box::new(std::iter::once(Err(Error::new(
                eyre!("signer(s) not accepted"),
                ErrorKind::InvalidSignature,
            )))),
        }
    }
    /// Advance the policy state machine with one observed signature: a
    /// matching `Signer` leaf becomes `Accepted`, `All` forwards to every
    /// child, and an `Any` collapses to its first satisfied branch.
    pub fn process_signature(&mut self, sig: &Signature) {
        // Swap in the never-satisfied `null()` so we can move out of the enum
        // while rebuilding it.
        let new = match std::mem::replace(self, Self::null()) {
            Self::Accepted(s) => Self::Accepted(s),
            Self::Signer(s) => {
                if s == sig.signer() {
                    Self::Accepted(sig.clone())
                } else {
                    Self::Signer(s)
                }
            }
            Self::All(mut s) => {
                s.iter_mut().for_each(|s| s.process_signature(sig));
                Self::All(s)
            }
            Self::Any(mut s) => {
                if let Some(s) = s
                    .iter_mut()
                    .map(|s| {
                        s.process_signature(sig);
                        s
                    })
                    .filter(|s| s.accepted())
                    .next()
                {
                    // Replace the Any node with the satisfied branch.
                    std::mem::replace(s, Self::null())
                } else {
                    Self::Any(s)
                }
            }
        };
        *self = new;
    }
}
/// Accumulated, mutually-consistent file commitments (hash + size) extracted
/// from accepted signatures; used to verify downloads and local files.
#[must_use]
pub struct FileValidator {
    blake3: Option<blake3::Hash>,
    size: Option<u64>,
}
impl FileValidator {
    /// Record a BLAKE3 hash + size commitment, erroring if it conflicts with
    /// one already recorded.
    fn add_blake3(&mut self, hash: [u8; 32], size: u64) -> Result<(), Error> {
        if let Some(h) = self.blake3 {
            ensure_code!(h == hash, ErrorKind::InvalidSignature, "hash sum mismatch");
        }
        self.blake3 = Some(blake3::Hash::from_bytes(hash));
        if let Some(s) = self.size {
            ensure_code!(s == size, ErrorKind::InvalidSignature, "file size mismatch");
        }
        self.size = Some(size);
        Ok(())
    }
    /// The committed BLAKE3 hash; errors if no BLAKE3 signature was accepted.
    pub fn blake3(&self) -> Result<blake3::Hash, Error> {
        if let Some(hash) = self.blake3 {
            Ok(hash)
        } else {
            Err(Error::new(
                eyre!("no BLAKE3 signatures found"),
                ErrorKind::InvalidSignature,
            ))
        }
    }
    /// The committed file size; errors if no signature was accepted.
    pub fn size(&self) -> Result<u64, Error> {
        if let Some(size) = self.size {
            Ok(size)
        } else {
            Err(Error::new(
                eyre!("no signatures found"),
                ErrorKind::InvalidSignature,
            ))
        }
    }
    /// Run the `accept` policy over `signatures` and fold every accepted
    /// signature's commitment into a validator.
    pub fn from_signatures(
        signatures: impl IntoIterator<Item = Signature>,
        mut accept: AcceptSigners,
        context: &str,
    ) -> Result<Self, Error> {
        let mut res = Self {
            blake3: None,
            size: None,
        };
        // First mark which signatures satisfy the policy...
        for signature in signatures {
            accept.process_signature(&signature);
        }
        // ...then validate the accepted ones and record their commitments.
        for signature in accept.try_accept(context) {
            match signature? {
                Signature::Blake3Ed25519(s) => res.add_blake3(*s.hash, s.size)?,
            }
        }
        Ok(res)
    }
    /// Download `url` into `dst`, verifying the stream against the committed
    /// hash and size as it is copied.
    pub async fn download(
        &self,
        url: Url,
        client: Client,
        dst: &mut (impl AsyncWrite + Unpin + Send + ?Sized),
    ) -> Result<(), Error> {
        let src = HttpSource::new(client, url).await?;
        let (Some(hash), Some(size)) = (self.blake3, self.size) else {
            return Err(Error::new(
                eyre!("no BLAKE3 signatures found"),
                ErrorKind::InvalidSignature,
            ));
        };
        src.section(0, size)
            .copy_verify(dst, Some((hash, size)))
            .await?;
        Ok(())
    }
    /// Verify a local file's size and BLAKE3 hash against the commitments.
    pub async fn validate_file(&self, file: &MultiCursorFile) -> Result<(), Error> {
        ensure_code!(
            file.size().await == Some(self.size()?),
            ErrorKind::InvalidSignature,
            "file size mismatch"
        );
        ensure_code!(
            file.blake3_mmap().await? == self.blake3()?,
            ErrorKind::InvalidSignature,
            "hash sum mismatch"
        );
        Ok(())
    }
}
/// A shared (hash, size) commitment with one ed25519 signature per key.
/// NOTE(review): type name looks like a typo for `Blake3Ed25519SignatureInfo`.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct Blake3Ed2551SignatureInfo {
    pub hash: Base64<[u8; 32]>,
    pub size: u64,
    pub signatures: HashMap<Pem<ed25519_dalek::VerifyingKey>, Base64<[u8; 64]>>,
}
impl Blake3Ed2551SignatureInfo {
    /// Validate every contained signature against `context`, returning the
    /// verified keys or short-circuiting on the first failure.
    pub fn validate(&self, context: &str) -> Result<Vec<Pem<ed25519_dalek::VerifyingKey>>, Error> {
        let mut verified = Vec::with_capacity(self.signatures.len());
        for (pubkey, signature) in &self.signatures {
            let sig = Blake3Ed25519Signature {
                hash: self.hash,
                size: self.size,
                pubkey: pubkey.clone(),
                signature: *signature,
            };
            sig.validate(context)?;
            verified.push(sig.pubkey);
        }
        Ok(verified)
    }
}
/// A single signature over a file, tagged by scheme.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum Signature {
    Blake3Ed25519(Blake3Ed25519Signature),
}
impl Signature {
    /// Verify this signature under the given signing context.
    pub fn validate(&self, context: &str) -> Result<(), Error> {
        let Self::Blake3Ed25519(sig) = self;
        sig.validate(context)
    }
    /// The key that produced this signature.
    pub fn signer(&self) -> SignerKey {
        let Self::Blake3Ed25519(sig) = self;
        SignerKey::Ed25519(sig.pubkey.clone())
    }
}
/// An ed25519 signature committing to a file's BLAKE3 hash and length.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct Blake3Ed25519Signature {
    pub hash: Base64<[u8; 32]>,
    pub size: u64,
    pub pubkey: Pem<ed25519_dalek::VerifyingKey>,
    // ed25519-sig(sha512(blake3(file) + len_u64_be(file)))
    pub signature: Base64<[u8; 64]>,
}
impl Blake3Ed25519Signature {
    /// Sign `file` with `key`. The signed message is
    /// sha512(blake3(file) ++ be64(len(file))), domain-separated by `context`.
    pub async fn sign_file(
        key: &ed25519_dalek::SigningKey,
        file: &MultiCursorFile,
        context: &str,
    ) -> Result<Self, Error> {
        let size = file
            .size()
            .await
            .ok_or_else(|| Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem))?;
        let hash = file.blake3_mmap().await?;
        let signature = key.sign_prehashed(
            Sha512::new_with_prefix(hash.as_bytes()).chain_update(u64::to_be_bytes(size)),
            Some(context.as_bytes()),
        )?;
        Ok(Self {
            hash: Base64(*hash.as_bytes()),
            size,
            pubkey: Pem::new(key.verifying_key()),
            signature: Base64(signature.to_bytes()),
        })
    }
    /// Verify the signature over the *recorded* hash/size — does not read the
    /// file itself; pair with `check_file` to bind it to actual contents.
    pub fn validate(&self, context: &str) -> Result<(), Error> {
        let sig = ed25519_dalek::Signature::from_bytes(&*self.signature);
        self.pubkey.verify_prehashed_strict(
            Sha512::new_with_prefix(*self.hash).chain_update(u64::to_be_bytes(self.size)),
            Some(context.as_bytes()),
            &sig,
        )?;
        Ok(())
    }
    /// Check that `file`'s actual size and BLAKE3 hash match the commitment
    /// recorded in this signature.
    pub async fn check_file(&self, file: &MultiCursorFile) -> Result<(), Error> {
        let size = file
            .size()
            .await
            .ok_or_else(|| Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem))?;
        if self.size != size {
            return Err(Error::new(
                eyre!("incorrect file size: expected {} got {}", self.size, size),
                ErrorKind::InvalidSignature,
            ));
        }
        let hash = file.blake3_mmap().await?;
        if &*self.hash != hash.as_bytes() {
            return Err(Error::new(
                eyre!("hash sum mismatch"),
                ErrorKind::InvalidSignature,
            ));
        }
        Ok(())
    }
    /// Wrap this single signature into a per-key signature table.
    pub fn info(&self) -> Blake3Ed2551SignatureInfo {
        Blake3Ed2551SignatureInfo {
            hash: self.hash,
            size: self.size,
            signatures: [(self.pubkey, self.signature)].into_iter().collect(),
        }
    }
}

View File

@@ -0,0 +1,50 @@
use blake3::Hash;
use digest::Update;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use ts_rs::TS;
use crate::prelude::*;
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::{ParallelBlake3Writer, TrackingIO};
use crate::util::serde::Base64;
use crate::CAP_10_MiB;
/// Commitment to a flat byte stream: its BLAKE3 hash and exact length.
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, PartialEq, Eq, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct Blake3Commitment {
    pub hash: Base64<[u8; 32]>,
    #[ts(type = "number")]
    pub size: u64,
}
impl Digestable for Blake3Commitment {
    /// Fold this commitment into a digest in a fixed order (raw hash bytes,
    /// then big-endian size) so the signed bytes are canonical.
    fn update<D: Update>(&self, digest: &mut D) {
        let Self { hash, size } = self;
        digest.update(&**hash);
        digest.update(&size.to_be_bytes());
    }
}
impl<'a, Resource: ArchiveSource> Commitment<&'a Resource> for Blake3Commitment {
    /// Hash the entire resource (parallel BLAKE3 with a 10 MiB cap) and
    /// record the number of bytes streamed as its size.
    async fn create(resource: &'a Resource) -> Result<Self, Error> {
        let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB));
        resource.copy_all_to(&mut hasher).await?;
        Ok(Self {
            size: hasher.position(),
            hash: Base64(*hasher.into_inner().finalize().await?.as_bytes()),
        })
    }
    /// Stream exactly `size` bytes from offset 0 into `writer`, failing if
    /// the hash does not match the commitment.
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a Resource,
        writer: W,
    ) -> Result<(), Error> {
        let mut hasher =
            VerifyingWriter::new(writer, Some((Hash::from_bytes(*self.hash), self.size)));
        resource.copy_to(0, self.size, &mut hasher).await?;
        hasher.verify().await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,98 @@
use digest::Update;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use ts_rs::TS;
use crate::prelude::*;
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::s9pk::merkle_archive::source::FileSource;
use crate::s9pk::merkle_archive::MerkleArchive;
use crate::s9pk::S9pk;
use crate::util::io::TrackingIO;
use crate::util::serde::Base64;
/// Commitment to a merkle archive: the root signature hash plus an upper
/// bound on the root directory's serialized size.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct MerkleArchiveCommitment {
    pub root_sighash: Base64<[u8; 32]>,
    #[ts(type = "number")]
    pub root_maxsize: u64,
}
impl Digestable for MerkleArchiveCommitment {
    /// Canonical digest encoding: raw sighash bytes, then big-endian max size.
    fn update<D: Update>(&self, digest: &mut D) {
        let Self {
            root_sighash,
            root_maxsize,
        } = self;
        digest.update(&**root_sighash);
        digest.update(&root_maxsize.to_be_bytes());
    }
}
impl<'a, S: FileSource + Clone> Commitment<&'a MerkleArchive<S>> for MerkleArchiveCommitment {
    /// Delegate to the archive's own commitment computation.
    async fn create(resource: &'a MerkleArchive<S>) -> Result<Self, Error> {
        resource.commitment().await
    }
    /// The root sighash must match exactly; `root_maxsize` is an upper bound,
    /// so only an archive whose root is *larger* than committed is rejected.
    async fn check(&self, resource: &'a MerkleArchive<S>) -> Result<(), Error> {
        let MerkleArchiveCommitment {
            root_sighash,
            root_maxsize,
        } = resource.commitment().await?;
        if root_sighash != self.root_sighash {
            return Err(Error::new(
                eyre!("merkle root mismatch"),
                ErrorKind::InvalidSignature,
            ));
        }
        if root_maxsize > self.root_maxsize {
            return Err(Error::new(
                eyre!("merkle root directory max size too large"),
                ErrorKind::InvalidSignature,
            ));
        }
        Ok(())
    }
    /// Check the archive against this commitment, then serialize it into
    /// `writer` (the `true` flag matches the verified-serialize call sites
    /// elsewhere in this feature — confirm its exact meaning in
    /// `MerkleArchive::serialize`).
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a MerkleArchive<S>,
        writer: W,
    ) -> Result<(), Error> {
        self.check(resource).await?;
        resource
            .serialize(&mut TrackingIO::new(0, writer), true)
            .await
    }
}
// Same commitment semantics as for a bare MerkleArchive, but addressed
// through the s9pk wrapper.
impl<'a, S: FileSource + Clone> Commitment<&'a S9pk<S>> for MerkleArchiveCommitment {
    /// Commit to the s9pk's inner archive.
    async fn create(resource: &'a S9pk<S>) -> Result<Self, Error> {
        resource.as_archive().commitment().await
    }
    /// Exact sighash match; `root_maxsize` is an upper bound (larger actual
    /// root is rejected, smaller is fine).
    async fn check(&self, resource: &'a S9pk<S>) -> Result<(), Error> {
        let MerkleArchiveCommitment {
            root_sighash,
            root_maxsize,
        } = resource.as_archive().commitment().await?;
        if root_sighash != self.root_sighash {
            return Err(Error::new(
                eyre!("merkle root mismatch"),
                ErrorKind::InvalidSignature,
            ));
        }
        if root_maxsize > self.root_maxsize {
            return Err(Error::new(
                eyre!("merkle root directory max size too large"),
                ErrorKind::InvalidSignature,
            ));
        }
        Ok(())
    }
    /// Check the s9pk against this commitment, then serialize it into
    /// `writer`. The clone is needed because `S9pk::serialize` consumes or
    /// mutates the value.
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a S9pk<S>,
        writer: W,
    ) -> Result<(), Error> {
        self.check(resource).await?;
        resource
            .clone()
            .serialize(&mut TrackingIO::new(0, writer), true)
            .await
    }
}

View File

@@ -0,0 +1,25 @@
use digest::Update;
use futures::Future;
use tokio::io::AsyncWrite;
use crate::prelude::*;
pub mod blake3;
pub mod merkle_archive;
pub mod request;
/// Types that can be folded into a cryptographic digest in a canonical order
/// (producing the bytes that get signed/verified).
pub trait Digestable {
    fn update<D: Update>(&self, digest: &mut D);
}
/// A digestable commitment to the contents of some resource that can be
/// recreated from the resource, streamed out, and checked.
pub trait Commitment<Resource>: Sized + Digestable {
    /// Compute the commitment for `resource`.
    fn create(resource: Resource) -> impl Future<Output = Result<Self, Error>> + Send;
    /// Stream `resource` into `writer`, failing if it does not match this
    /// commitment.
    fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: Resource,
        writer: W,
    ) -> impl Future<Output = Result<(), Error>> + Send;
    /// Verify `resource` against this commitment. Default implementation
    /// streams to a sink, relying on `copy_to`'s verification.
    fn check(&self, resource: Resource) -> impl Future<Output = Result<(), Error>> + Send {
        self.copy_to(resource, tokio::io::sink())
    }
}

View File

@@ -0,0 +1,102 @@
use std::time::{SystemTime, UNIX_EPOCH};
use std::collections::BTreeMap;
use axum::body::Body;
use axum::extract::Request;
use digest::Update;
use futures::TryStreamExt;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use tokio_util::io::StreamReader;
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::util::serde::Base64;
/// Commitment to an HTTP request body, plus a timestamp and nonce (which are
/// folded into the digest, usable for replay protection); round-trips
/// through URL query parameters.
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, PartialEq, Eq, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct RequestCommitment {
    /// Unix seconds; negative if the clock reads before the epoch.
    #[ts(type = "number")]
    pub timestamp: i64,
    /// Random per-request value.
    #[ts(type = "number")]
    pub nonce: u64,
    /// Body length in bytes.
    #[ts(type = "number")]
    pub size: u64,
    /// BLAKE3 hash of the body.
    pub blake3: Base64<[u8; 32]>,
}
impl RequestCommitment {
    /// Encode this commitment into `url`'s query string.
    pub fn append_query(&self, url: &mut Url) {
        let mut pairs = url.query_pairs_mut();
        pairs.append_pair("timestamp", &self.timestamp.to_string());
        pairs.append_pair("nonce", &self.nonce.to_string());
        pairs.append_pair("size", &self.size.to_string());
        pairs.append_pair("blake3", &self.blake3.to_string());
    }
    /// Reconstruct a commitment from a query string written by `append_query`.
    /// Errors if any field is absent or fails to parse.
    pub fn from_query(url: &Url) -> Result<Self, Error> {
        let query: BTreeMap<_, _> = url.query_pairs().collect();
        let field = |name: &str| query.get(name).or_not_found(name);
        Ok(Self {
            timestamp: field("timestamp")?.parse()?,
            nonce: field("nonce")?.parse()?,
            size: field("size")?.parse()?,
            blake3: field("blake3")?.parse()?,
        })
    }
}
impl Digestable for RequestCommitment {
    fn update<D: Update>(&self, digest: &mut D) {
        // Field order is load-bearing: it defines the byte layout that gets signed.
        digest.update(&self.timestamp.to_be_bytes());
        digest.update(&self.nonce.to_be_bytes());
        digest.update(&self.size.to_be_bytes());
        digest.update(&*self.blake3);
    }
}
impl<'a> Commitment<&'a mut Request> for RequestCommitment {
    /// Buffer the request body in memory, record timestamp/nonce/size/blake3,
    /// then restore the body so the request can still be sent.
    async fn create(resource: &'a mut Request) -> Result<Self, Error> {
        use http_body_util::BodyExt;
        // Collect the entire body so it can be hashed, then put back.
        let body = std::mem::replace(resource.body_mut(), Body::empty())
            .collect()
            .await
            .with_kind(ErrorKind::Network)?
            .to_bytes();
        let res = Self {
            // Seconds since the Unix epoch; negated if the clock is pre-1970.
            timestamp: SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .map(|d| d.as_secs() as i64)
                .unwrap_or_else(|e| e.duration().as_secs() as i64 * -1),
            nonce: rand::random(),
            size: body.len() as u64,
            blake3: Base64(*blake3::hash(&*body).as_bytes()),
        };
        // Re-attach the (now fully buffered) body to the request.
        *resource.body_mut() = Body::from(body);
        Ok(res)
    }
    /// Stream the request body into `writer`, verifying that it matches the
    /// committed size and blake3 hash.
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a mut Request,
        writer: W,
    ) -> Result<(), Error> {
        use tokio::io::AsyncReadExt;
        // Cap reads at the committed size so an oversized body can't flood us.
        let mut body = StreamReader::new(
            std::mem::replace(resource.body_mut(), Body::empty())
                .into_data_stream()
                .map_err(std::io::Error::other),
        )
        .take(self.size);
        // VerifyingWriter checks both the hash and the exact byte count.
        let mut writer = VerifyingWriter::new(
            writer,
            Some((blake3::Hash::from_bytes(*self.blake3), self.size)),
        );
        tokio::io::copy(&mut body, &mut writer).await?;
        writer.verify().await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,154 @@
use std::collections::HashSet;
use std::str::FromStr;
use clap::builder::ValueParserFactory;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::signer::commitment::Digestable;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::util::clap::FromStrParser;
pub mod commitment;
pub mod sign;
/// Registry-visible identity of a package signer.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct SignerInfo {
    /// Human-readable name of the signer.
    pub name: String,
    /// Ways to reach the signer (email, matrix, website).
    pub contact: Vec<ContactInfo>,
    /// Public keys this signer may sign with.
    pub keys: HashSet<AnyVerifyingKey>,
}
/// One way to contact a signer. Round-trips through URI form via
/// `Display` / `FromStr`.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
// TODO: better types
pub enum ContactInfo {
    Email(String),
    Matrix(String),
    Website(#[ts(type = "string")] Url),
}
impl std::fmt::Display for ContactInfo {
    // Renders each contact method as a URI; inverse of the FromStr impl.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let rendered = match self {
            Self::Email(e) => format!("mailto:{e}"),
            Self::Matrix(m) => format!("https://matrix.to/#/{m}"),
            Self::Website(w) => w.to_string(),
        };
        f.write_str(&rendered)
    }
}
impl FromStr for ContactInfo {
    type Err = Error;
    // Recognized URI prefixes map back to their structured variants;
    // anything else must parse as a plain URL.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Some(email) = s.strip_prefix("mailto:") {
            Ok(Self::Email(email.to_owned()))
        } else if let Some(handle) = s.strip_prefix("https://matrix.to/#/") {
            Ok(Self::Matrix(handle.to_owned()))
        } else {
            Ok(Self::Website(s.parse()?))
        }
    }
}
// Lets clap parse ContactInfo CLI arguments via the FromStr impl above.
impl ValueParserFactory for ContactInfo {
    type Parser = FromStrParser<Self>;
    fn value_parser() -> Self::Parser {
        Self::Parser::new()
    }
}
/// Policy tree describing which signer(s) must have signed for a resource
/// to be trusted.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum AcceptSigners {
    /// Policy already satisfied; internal state only, never (de)serialized.
    #[serde(skip)]
    Accepted,
    /// Satisfied by a valid signature from this exact key.
    Signer(AnyVerifyingKey),
    /// Satisfied when at least one sub-policy is satisfied.
    Any(Vec<AcceptSigners>),
    /// Satisfied when every sub-policy is satisfied.
    All(Vec<AcceptSigners>),
}
impl AcceptSigners {
    /// Policy that can never be satisfied: `Any` over an empty set. Used as a
    /// placeholder while `process_signature` rebuilds `self` by value.
    const fn null() -> Self {
        Self::Any(Vec::new())
    }
    /// Collapse redundant single-child `Any` / `All` wrappers.
    pub fn flatten(self) -> Self {
        match self {
            Self::Any(mut s) | Self::All(mut s) if s.len() == 1 => s.swap_remove(0).flatten(),
            s => s,
        }
    }
    /// `true` once enough valid signatures have been processed to satisfy
    /// this policy.
    pub fn accepted(&self) -> bool {
        matches!(self, Self::Accepted)
    }
    /// Succeeds iff the policy is satisfied; otherwise `InvalidSignature`.
    pub fn try_accept(self) -> Result<(), Error> {
        if self.accepted() {
            Ok(())
        } else {
            Err(Error::new(
                eyre!("signer(s) not accepted"),
                ErrorKind::InvalidSignature,
            ))
        }
    }
    /// Feed one `(signer, signature)` pair into the policy tree.
    ///
    /// Subtrees satisfied by the signature collapse to `Accepted`. Returns the
    /// first verification error encountered; state is still updated as far as
    /// processing got before the error.
    pub fn process_signature(
        &mut self,
        signer: &AnyVerifyingKey,
        commitment: &impl Digestable,
        context: &str,
        signature: &AnySignature,
    ) -> Result<(), Error> {
        let mut res = Ok(());
        // Temporarily move `self` out (leaving an unsatisfiable placeholder)
        // so the new state can be built by value.
        let new = match std::mem::replace(self, Self::null()) {
            Self::Accepted => Self::Accepted,
            Self::Signer(s) => {
                if &s == signer {
                    // Matching key: verify the signature against the commitment.
                    res = signer
                        .scheme()
                        .verify_commitment(signer, commitment, context, signature);
                    Self::Accepted
                } else {
                    Self::Signer(s)
                }
            }
            Self::All(mut s) => {
                // Propagate to children; `collect` stops at the first error.
                res = s
                    .iter_mut()
                    .map(|s| s.process_signature(signer, commitment, context, signature))
                    .collect();
                if s.iter().all(|s| s.accepted()) {
                    Self::Accepted
                } else {
                    Self::All(s)
                }
            }
            Self::Any(mut s) => {
                // Lazily propagate until a child accepts (or errors); children
                // after the first accepted one are not processed.
                match s
                    .iter_mut()
                    .map(|s| {
                        s.process_signature(signer, commitment, context, signature)?;
                        Ok(s)
                    })
                    .filter_ok(|s| s.accepted())
                    .next()
                {
                    // Promote the accepted child's state to this node.
                    Some(Ok(s)) => std::mem::replace(s, Self::null()),
                    Some(Err(e)) => {
                        res = Err(e);
                        Self::Any(s)
                    }
                    None => Self::Any(s),
                }
            }
        };
        *self = new;
        res
    }
}

View File

@@ -0,0 +1,34 @@
use ed25519_dalek::{Signature, SigningKey, VerifyingKey};
use sha2::Sha512;
use crate::prelude::*;
use crate::registry::signer::sign::SignatureScheme;
/// Ed25519 signature scheme in pre-hashed (Ed25519ph, SHA-512) mode.
pub struct Ed25519;
impl SignatureScheme for Ed25519 {
    type SigningKey = SigningKey;
    type VerifyingKey = VerifyingKey;
    type Signature = Signature;
    // Ed25519ph pre-hashes the message with SHA-512.
    type Digest = Sha512;
    fn new_digest(&self) -> Self::Digest {
        <Self::Digest as digest::Digest>::new()
    }
    /// Sign the pre-hashed message, domain-separated by `context`.
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        Ok(key.sign_prehashed(digest, Some(context.as_bytes()))?)
    }
    /// Verify a pre-hashed signature using dalek's "strict" verification.
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        key.verify_prehashed_strict(digest, Some(context.as_bytes()), signature)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,348 @@
use std::fmt::Display;
use std::str::FromStr;
use ::ed25519::pkcs8::BitStringRef;
use clap::builder::ValueParserFactory;
use der::referenced::OwnedToRef;
use der::{Decode, Encode};
use pkcs8::der::AnyRef;
use pkcs8::{PrivateKeyInfo, SubjectPublicKeyInfo};
use serde::{Deserialize, Serialize};
use sha2::Sha512;
use ts_rs::TS;
use crate::prelude::*;
use crate::registry::signer::commitment::Digestable;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::util::clap::FromStrParser;
use crate::util::serde::{deserialize_from_str, serialize_display};
pub mod ed25519;
/// A digital signature scheme: its key and signature types plus the digest
/// used to pre-hash messages before signing / verifying.
pub trait SignatureScheme {
    type SigningKey;
    type VerifyingKey;
    type Signature;
    type Digest: digest::Update;
    /// Fresh digest state for this scheme's pre-hash.
    fn new_digest(&self) -> Self::Digest;
    /// Sign a finished digest, domain-separated by `context`.
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error>;
    /// Convenience: digest `commitment` and sign it.
    fn sign_commitment<C: Digestable>(
        &self,
        key: &Self::SigningKey,
        commitment: &C,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        let mut digest = self.new_digest();
        commitment.update(&mut digest);
        self.sign(key, digest, context)
    }
    /// Verify `signature` over a finished digest, domain-separated by `context`.
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error>;
    /// Convenience: digest `commitment` and verify `signature` over it.
    fn verify_commitment<C: Digestable>(
        &self,
        key: &Self::VerifyingKey,
        commitment: &C,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        let mut digest = self.new_digest();
        commitment.update(&mut digest);
        self.verify(key, digest, context, signature)
    }
}
/// Type-erased signature scheme; currently Ed25519 is the only variant.
pub enum AnyScheme {
    Ed25519(Ed25519),
}
impl From<Ed25519> for AnyScheme {
    fn from(value: Ed25519) -> Self {
        Self::Ed25519(value)
    }
}
/// Dispatches to the concrete scheme; mixing variants (e.g. an Ed25519 scheme
/// with a non-Ed25519 key) is an `InvalidSignature` error.
impl SignatureScheme for AnyScheme {
    type SigningKey = AnySigningKey;
    type VerifyingKey = AnyVerifyingKey;
    type Signature = AnySignature;
    type Digest = AnyDigest;
    fn new_digest(&self) -> Self::Digest {
        match self {
            Self::Ed25519(s) => AnyDigest::Sha512(s.new_digest()),
        }
    }
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        match (self, key, digest) {
            (Self::Ed25519(s), AnySigningKey::Ed25519(key), AnyDigest::Sha512(digest)) => {
                Ok(AnySignature::Ed25519(s.sign(key, digest, context)?))
            }
            // Scheme/key/digest variants don't all match.
            _ => Err(Error::new(
                eyre!("mismatched signature algorithm"),
                ErrorKind::InvalidSignature,
            )),
        }
    }
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        match (self, key, digest, signature) {
            (
                Self::Ed25519(s),
                AnyVerifyingKey::Ed25519(key),
                AnyDigest::Sha512(digest),
                AnySignature::Ed25519(signature),
            ) => s.verify(key, digest, context, signature),
            // Scheme/key/digest/signature variants don't all match.
            _ => Err(Error::new(
                eyre!("mismatched signature algorithm"),
                ErrorKind::InvalidSignature,
            )),
        }
    }
}
/// Type-erased private signing key. Serialized as a PKCS#8 PEM string
/// (see the `Display` / `FromStr` impls below).
#[derive(Clone, Debug, PartialEq, Eq, TS)]
#[ts(export, type = "string")]
pub enum AnySigningKey {
    Ed25519(<Ed25519 as SignatureScheme>::SigningKey),
}
impl AnySigningKey {
    /// The scheme this key signs with.
    pub fn scheme(&self) -> AnyScheme {
        match self {
            Self::Ed25519(_) => AnyScheme::Ed25519(Ed25519),
        }
    }
    /// Derive the corresponding public (verifying) key.
    pub fn verifying_key(&self) -> AnyVerifyingKey {
        match self {
            Self::Ed25519(k) => AnyVerifyingKey::Ed25519(k.into()),
        }
    }
}
impl<'a> TryFrom<PrivateKeyInfo<'a>> for AnySigningKey {
    type Error = pkcs8::Error;
    /// Decode from PKCS#8, dispatching on the algorithm identifier.
    fn try_from(value: PrivateKeyInfo<'a>) -> Result<Self, Self::Error> {
        if value.algorithm == ed25519_dalek::pkcs8::ALGORITHM_ID {
            Ok(Self::Ed25519(ed25519_dalek::SigningKey::try_from(value)?))
        } else {
            Err(pkcs8::spki::Error::OidUnknown {
                oid: value.algorithm.oid,
            }
            .into())
        }
    }
}
impl pkcs8::EncodePrivateKey for AnySigningKey {
    fn to_pkcs8_der(&self) -> pkcs8::Result<pkcs8::SecretDocument> {
        match self {
            Self::Ed25519(s) => s.to_pkcs8_der(),
        }
    }
}
impl FromStr for AnySigningKey {
    type Err = Error;
    // Parses a PKCS#8 PEM string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use pkcs8::DecodePrivateKey;
        Self::from_pkcs8_pem(s).with_kind(ErrorKind::Deserialization)
    }
}
impl Display for AnySigningKey {
    // Renders as a PKCS#8 PEM string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use pkcs8::EncodePrivateKey;
        f.write_str(
            &self
                .to_pkcs8_pem(pkcs8::LineEnding::LF)
                .map_err(|_| std::fmt::Error)?,
        )
    }
}
// Serde round-trips through the PEM string form above.
impl<'de> Deserialize<'de> for AnySigningKey {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
impl Serialize for AnySigningKey {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}
/// Type-erased public verifying key. Serialized as an SPKI PEM string
/// (see the `Display` / `FromStr` impls below).
#[derive(Clone, Debug, PartialEq, Eq, Hash, TS)]
#[ts(export, type = "string")]
pub enum AnyVerifyingKey {
    Ed25519(<Ed25519 as SignatureScheme>::VerifyingKey),
}
impl AnyVerifyingKey {
    /// The scheme this key verifies signatures for.
    pub fn scheme(&self) -> AnyScheme {
        match self {
            Self::Ed25519(_) => AnyScheme::Ed25519(Ed25519),
        }
    }
}
impl<'a> TryFrom<SubjectPublicKeyInfo<AnyRef<'a>, BitStringRef<'a>>> for AnyVerifyingKey {
    type Error = pkcs8::spki::Error;
    /// Decode from SPKI, dispatching on the algorithm identifier.
    fn try_from(
        value: SubjectPublicKeyInfo<AnyRef<'a>, BitStringRef<'a>>,
    ) -> Result<Self, Self::Error> {
        if value.algorithm == ed25519_dalek::pkcs8::ALGORITHM_ID {
            Ok(Self::Ed25519(ed25519_dalek::VerifyingKey::try_from(value)?))
        } else {
            Err(pkcs8::spki::Error::OidUnknown {
                oid: value.algorithm.oid,
            })
        }
    }
}
impl pkcs8::EncodePublicKey for AnyVerifyingKey {
    fn to_public_key_der(&self) -> pkcs8::spki::Result<pkcs8::Document> {
        match self {
            Self::Ed25519(s) => s.to_public_key_der(),
        }
    }
}
impl FromStr for AnyVerifyingKey {
    type Err = Error;
    // Parses an SPKI PEM string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use pkcs8::DecodePublicKey;
        Self::from_public_key_pem(s).with_kind(ErrorKind::Deserialization)
    }
}
impl Display for AnyVerifyingKey {
    // Renders as an SPKI PEM string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use pkcs8::EncodePublicKey;
        f.write_str(
            &self
                .to_public_key_pem(pkcs8::LineEnding::LF)
                .map_err(|_| std::fmt::Error)?,
        )
    }
}
// Serde round-trips through the PEM string form above.
impl<'de> Deserialize<'de> for AnyVerifyingKey {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
impl Serialize for AnyVerifyingKey {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}
// Lets clap parse AnyVerifyingKey CLI arguments via the FromStr impl above.
impl ValueParserFactory for AnyVerifyingKey {
    type Parser = FromStrParser<Self>;
    fn value_parser() -> Self::Parser {
        Self::Parser::new()
    }
}
/// Type-erased pre-hash digest state (one variant per supported scheme).
#[derive(Clone, Debug)]
pub enum AnyDigest {
    Sha512(Sha512),
}
impl digest::Update for AnyDigest {
    fn update(&mut self, data: &[u8]) {
        match self {
            Self::Sha512(d) => digest::Update::update(d, data),
        }
    }
}
/// Type-erased signature. Serialized as a PEM-wrapped DER sequence of
/// `(algorithm identifier, signature octets)` — see `Display` / `FromStr`.
#[derive(Clone, Debug, PartialEq, Eq, TS)]
#[ts(export, type = "string")]
pub enum AnySignature {
    Ed25519(<Ed25519 as SignatureScheme>::Signature),
}
impl FromStr for AnySignature {
    type Err = Error;
    /// Parse a `-----BEGIN SIGNATURE-----` PEM block and dispatch on the
    /// embedded algorithm identifier.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use der::DecodePem;
        // Wire format: SEQUENCE { AlgorithmIdentifier, OCTET STRING }.
        #[derive(der::Sequence)]
        struct AnySignatureDer {
            alg: pkcs8::spki::AlgorithmIdentifierOwned,
            sig: der::asn1::OctetString,
        }
        impl der::pem::PemLabel for AnySignatureDer {
            const PEM_LABEL: &'static str = "SIGNATURE";
        }
        let der = AnySignatureDer::from_pem(s.as_bytes()).with_kind(ErrorKind::Deserialization)?;
        // Both the OID and its parameters must match Ed25519's identifier.
        if der.alg.oid == ed25519_dalek::pkcs8::ALGORITHM_ID.oid
            && der.alg.parameters.owned_to_ref() == ed25519_dalek::pkcs8::ALGORITHM_ID.parameters
        {
            Ok(Self::Ed25519(
                ed25519_dalek::Signature::from_slice(der.sig.as_bytes())
                    .with_kind(ErrorKind::Deserialization)?,
            ))
        } else {
            Err(pkcs8::spki::Error::OidUnknown { oid: der.alg.oid })
                .with_kind(ErrorKind::Deserialization)
        }
    }
}
impl Display for AnySignature {
    /// Render as a `-----BEGIN SIGNATURE-----` PEM block (inverse of `FromStr`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use der::EncodePem;
        // Same wire format as in FromStr, but borrowing the algorithm id.
        #[derive(der::Sequence)]
        struct AnySignatureDer<'a> {
            alg: pkcs8::AlgorithmIdentifierRef<'a>,
            sig: der::asn1::OctetString,
        }
        impl<'a> der::pem::PemLabel for AnySignatureDer<'a> {
            const PEM_LABEL: &'static str = "SIGNATURE";
        }
        f.write_str(
            &match self {
                Self::Ed25519(s) => AnySignatureDer {
                    alg: ed25519_dalek::pkcs8::ALGORITHM_ID,
                    sig: der::asn1::OctetString::new(s.to_bytes()).map_err(|_| std::fmt::Error)?,
                },
            }
            .to_pem(der::pem::LineEnding::LF)
            .map_err(|_| std::fmt::Error)?,
        )
    }
}
// Serde round-trips through the PEM string form above.
impl<'de> Deserialize<'de> for AnySignature {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
impl Serialize for AnySignature {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}

View File

@@ -10,20 +10,24 @@ use futures::future::BoxFuture;
use helpers::TimedResource;
use imbl_value::InternedString;
use tokio::sync::Mutex;
use ts_rs::TS;
#[allow(unused_imports)]
use crate::prelude::*;
use crate::util::clap::FromStrParser;
use crate::util::new_guid;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)]
pub struct RequestGuid(InternedString);
impl RequestGuid {
#[derive(
Debug, Clone, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize, TS,
)]
#[ts(type = "string")]
pub struct Guid(InternedString);
impl Guid {
pub fn new() -> Self {
Self(new_guid())
}
pub fn from(r: &str) -> Option<RequestGuid> {
pub fn from(r: &str) -> Option<Guid> {
if r.len() != 32 {
return None;
}
@@ -32,21 +36,21 @@ impl RequestGuid {
return None;
}
}
Some(RequestGuid(InternedString::intern(r)))
Some(Guid(InternedString::intern(r)))
}
}
impl AsRef<str> for RequestGuid {
impl AsRef<str> for Guid {
fn as_ref(&self) -> &str {
self.0.as_ref()
}
}
impl FromStr for RequestGuid {
impl FromStr for Guid {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Self::from(s).ok_or_else(|| Error::new(eyre!("invalid guid"), ErrorKind::Deserialization))
}
}
impl ValueParserFactory for RequestGuid {
impl ValueParserFactory for Guid {
type Parser = FromStrParser<Self>;
fn value_parser() -> Self::Parser {
Self::Parser::new()
@@ -55,13 +59,10 @@ impl ValueParserFactory for RequestGuid {
#[test]
fn parse_guid() {
println!(
"{:?}",
RequestGuid::from(&format!("{}", RequestGuid::new()))
)
println!("{:?}", Guid::from(&format!("{}", Guid::new())))
}
impl std::fmt::Display for RequestGuid {
impl std::fmt::Display for Guid {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
@@ -91,7 +92,7 @@ impl RpcContinuation {
}
}
pub struct RpcContinuations(Mutex<BTreeMap<RequestGuid, RpcContinuation>>);
pub struct RpcContinuations(Mutex<BTreeMap<Guid, RpcContinuation>>);
impl RpcContinuations {
pub fn new() -> Self {
RpcContinuations(Mutex::new(BTreeMap::new()))
@@ -112,12 +113,12 @@ impl RpcContinuations {
}
#[instrument(skip_all)]
pub async fn add(&self, guid: RequestGuid, handler: RpcContinuation) {
pub async fn add(&self, guid: Guid, handler: RpcContinuation) {
self.clean().await;
self.0.lock().await.insert(guid, handler);
}
pub async fn get_ws_handler(&self, guid: &RequestGuid) -> Option<WebSocketHandler> {
pub async fn get_ws_handler(&self, guid: &Guid) -> Option<WebSocketHandler> {
let mut continuations = self.0.lock().await;
if !matches!(continuations.get(guid), Some(RpcContinuation::WebSocket(_))) {
return None;
@@ -128,8 +129,8 @@ impl RpcContinuations {
x.get().await
}
pub async fn get_rest_handler(&self, guid: &RequestGuid) -> Option<RestHandler> {
let mut continuations: tokio::sync::MutexGuard<'_, BTreeMap<RequestGuid, RpcContinuation>> =
pub async fn get_rest_handler(&self, guid: &Guid) -> Option<RestHandler> {
let mut continuations: tokio::sync::MutexGuard<'_, BTreeMap<Guid, RpcContinuation>> =
self.0.lock().await;
if !matches!(continuations.get(guid), Some(RpcContinuation::Rest(_))) {
return None;

View File

@@ -1,24 +1,35 @@
use std::path::Path;
use crate::Error;
use tokio::process::Command;
use crate::prelude::*;
use crate::util::Invoke;
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct GitHash(String);
impl GitHash {
pub async fn from_path(path: impl AsRef<Path>) -> Result<GitHash, Error> {
let hash = tokio::process::Command::new("git")
.args(["describe", "--always", "--abbrev=40", "--dirty=-modified"])
.current_dir(path)
.output()
.await?;
if !hash.status.success() {
return Err(Error::new(
color_eyre::eyre::eyre!("Could not get hash: {}", String::from_utf8(hash.stderr)?),
crate::ErrorKind::Filesystem,
));
let mut hash = String::from_utf8(
Command::new("git")
.arg("rev-parse")
.arg("HEAD")
.current_dir(&path)
.invoke(ErrorKind::Git)
.await?,
)?;
if Command::new("git")
.arg("diff-index")
.arg("--quiet")
.arg("HEAD")
.arg("--")
.invoke(ErrorKind::Git)
.await
.is_err()
{
hash += "-modified";
}
Ok(GitHash(String::from_utf8(hash.stdout)?))
Ok(GitHash(hash))
}
}

View File

@@ -12,11 +12,12 @@ use itertools::Itertools;
use tokio::io::AsyncRead;
use crate::prelude::*;
use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter};
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
use crate::s9pk::merkle_archive::write_queue::WriteQueue;
use crate::s9pk::merkle_archive::{varint, Entry, EntryContents};
use crate::util::io::ParallelBlake3Writer;
use crate::util::io::{ParallelBlake3Writer, TrackingIO};
use crate::CAP_10_MiB;
#[derive(Clone)]
pub struct DirectoryContents<S> {
@@ -151,7 +152,7 @@ impl<S: Clone> DirectoryContents<S> {
Ok(())
}
}
impl<S: ArchiveSource> DirectoryContents<Section<S>> {
impl<S: ArchiveSource + Clone> DirectoryContents<Section<S>> {
#[instrument(skip_all)]
pub fn deserialize<'a>(
source: &'a S,
@@ -181,7 +182,7 @@ impl<S: ArchiveSource> DirectoryContents<Section<S>> {
let mut entries = OrdMap::new();
for _ in 0..len {
let name = varint::deserialize_varstring(&mut toc_reader).await?;
let entry = Entry::deserialize(source, &mut toc_reader).await?;
let entry = Entry::deserialize(source.clone(), &mut toc_reader).await?;
entries.insert(name.into(), entry);
}
@@ -202,7 +203,7 @@ impl<S: ArchiveSource> DirectoryContents<Section<S>> {
.boxed()
}
}
impl<S: FileSource> DirectoryContents<S> {
impl<S: FileSource + Clone> DirectoryContents<S> {
pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> {
for k in self.keys().cloned().collect::<Vec<_>>() {
let path = Path::new(&*k);
@@ -239,8 +240,7 @@ impl<S: FileSource> DirectoryContents<S> {
#[instrument(skip_all)]
pub fn sighash<'a>(&'a self) -> BoxFuture<'a, Result<Hash, Error>> {
async move {
let mut hasher =
TrackingWriter::new(0, ParallelBlake3Writer::new(super::hash::BUFFER_CAPACITY));
let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB));
let mut sig_contents = OrdMap::new();
for (name, entry) in &**self {
sig_contents.insert(name.clone(), entry.to_missing().await?);
@@ -280,6 +280,7 @@ impl<S: FileSource> DirectoryContents<S> {
Ok(())
}
pub fn into_dyn(self) -> DirectoryContents<DynFileSource> {
DirectoryContents {
contents: self

View File

@@ -2,9 +2,10 @@ use blake3::Hash;
use tokio::io::AsyncRead;
use crate::prelude::*;
use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter};
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
use crate::util::io::ParallelBlake3Writer;
use crate::util::io::{ParallelBlake3Writer, TrackingIO};
use crate::CAP_10_MiB;
#[derive(Debug, Clone)]
pub struct FileContents<S>(S);
@@ -19,7 +20,7 @@ impl<S> FileContents<S> {
impl<S: ArchiveSource> FileContents<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
source: S,
header: &mut (impl AsyncRead + Unpin + Send),
size: u64,
) -> Result<Self, Error> {
@@ -34,8 +35,7 @@ impl<S: ArchiveSource> FileContents<Section<S>> {
}
impl<S: FileSource> FileContents<S> {
pub async fn hash(&self) -> Result<(Hash, u64), Error> {
let mut hasher =
TrackingWriter::new(0, ParallelBlake3Writer::new(super::hash::BUFFER_CAPACITY));
let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB));
self.serialize_body(&mut hasher, None).await?;
let size = hasher.position();
let hash = hasher.into_inner().finalize().await?;

View File

@@ -6,8 +6,7 @@ use tokio_util::either::Either;
use crate::prelude::*;
use crate::util::io::{ParallelBlake3Writer, TeeWriter};
pub const BUFFER_CAPACITY: usize = 10 * 1024 * 1024; // 10MiB
use crate::CAP_10_MiB;
#[pin_project::pin_project]
pub struct VerifyingWriter<W> {
@@ -21,8 +20,8 @@ impl<W: AsyncWrite> VerifyingWriter<W> {
writer: if verify.is_some() {
Either::Left(TeeWriter::new(
w,
ParallelBlake3Writer::new(BUFFER_CAPACITY),
BUFFER_CAPACITY,
ParallelBlake3Writer::new(CAP_10_MiB),
CAP_10_MiB,
))
} else {
Either::Right(w)

View File

@@ -7,11 +7,16 @@ use sha2::{Digest, Sha512};
use tokio::io::AsyncRead;
use crate::prelude::*;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::SignatureScheme;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::file_contents::FileContents;
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
use crate::s9pk::merkle_archive::write_queue::WriteQueue;
use crate::util::serde::Base64;
use crate::CAP_1_MiB;
pub mod directory_contents;
pub mod file_contents;
@@ -70,12 +75,13 @@ impl<S> MerkleArchive<S> {
self.contents.sort_by(sort_by)
}
}
impl<S: ArchiveSource> MerkleArchive<Section<S>> {
impl<S: ArchiveSource + Clone> MerkleArchive<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
context: &str,
header: &mut (impl AsyncRead + Unpin + Send),
commitment: Option<&MerkleArchiveCommitment>,
) -> Result<Self, Error> {
use tokio::io::AsyncReadExt;
@@ -101,6 +107,32 @@ impl<S: ArchiveSource> MerkleArchive<Section<S>> {
&signature,
)?;
if let Some(MerkleArchiveCommitment {
root_sighash,
root_maxsize,
}) = commitment
{
if sighash.as_bytes() != &**root_sighash {
return Err(Error::new(
eyre!("merkle root mismatch"),
ErrorKind::InvalidSignature,
));
}
if max_size > *root_maxsize {
return Err(Error::new(
eyre!("merkle root directory max size too large"),
ErrorKind::InvalidSignature,
));
}
} else {
if max_size > CAP_1_MiB as u64 {
return Err(Error::new(
eyre!("merkle root directory max size over 1MiB, cancelling download in case of DOS attack"),
ErrorKind::InvalidSignature,
));
}
}
let contents = DirectoryContents::deserialize(source, header, (sighash, max_size)).await?;
Ok(Self {
@@ -109,38 +141,50 @@ impl<S: ArchiveSource> MerkleArchive<Section<S>> {
})
}
}
impl<S: FileSource> MerkleArchive<S> {
impl<S: FileSource + Clone> MerkleArchive<S> {
pub async fn update_hashes(&mut self, only_missing: bool) -> Result<(), Error> {
self.contents.update_hashes(only_missing).await
}
pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> {
self.contents.filter(filter)
}
pub async fn commitment(&self) -> Result<MerkleArchiveCommitment, Error> {
let root_maxsize = match self.signer {
Signer::Signed(_, _, s, _) => s,
_ => self.contents.toc_size(),
};
let root_sighash = self.contents.sighash().await?;
Ok(MerkleArchiveCommitment {
root_sighash: Base64(*root_sighash.as_bytes()),
root_maxsize,
})
}
pub async fn signature(&self) -> Result<Signature, Error> {
match &self.signer {
Signer::Signed(_, s, _, _) => Ok(*s),
Signer::Signer(k, context) => {
Ed25519.sign_commitment(k, &self.commitment().await?, context)
}
}
}
#[instrument(skip_all)]
pub async fn serialize<W: Sink>(&self, w: &mut W, verify: bool) -> Result<(), Error> {
use tokio::io::AsyncWriteExt;
let sighash = self.contents.sighash().await?;
let size = self.contents.toc_size();
let commitment = self.commitment().await?;
let (pubkey, signature, max_size) = match &self.signer {
Signer::Signed(pubkey, signature, max_size, _) => (*pubkey, *signature, *max_size),
Signer::Signer(s, context) => (
s.into(),
ed25519_dalek::SigningKey::sign_prehashed(
s,
Sha512::new_with_prefix(sighash.as_bytes())
.chain_update(&u64::to_be_bytes(size)),
Some(context.as_bytes()),
)?,
size,
),
let (pubkey, signature) = match &self.signer {
Signer::Signed(pubkey, signature, _, _) => (*pubkey, *signature),
Signer::Signer(s, context) => {
(s.into(), Ed25519.sign_commitment(s, &commitment, context)?)
}
};
w.write_all(pubkey.as_bytes()).await?;
w.write_all(&signature.to_bytes()).await?;
w.write_all(sighash.as_bytes()).await?;
w.write_all(&u64::to_be_bytes(max_size)).await?;
w.write_all(&*commitment.root_sighash).await?;
w.write_all(&u64::to_be_bytes(commitment.root_maxsize))
.await?;
let mut next_pos = w.current_position().await?;
next_pos += DirectoryContents::<S>::header_size();
self.contents.serialize_header(next_pos, w).await?;
@@ -216,11 +260,10 @@ impl<S> Entry<S> {
+ self.contents.header_size()
}
}
impl<S: Clone> Entry<S> {}
impl<S: ArchiveSource> Entry<Section<S>> {
impl<S: ArchiveSource + Clone> Entry<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
source: S,
header: &mut (impl AsyncRead + Unpin + Send),
) -> Result<Self, Error> {
use tokio::io::AsyncReadExt;
@@ -241,24 +284,19 @@ impl<S: ArchiveSource> Entry<Section<S>> {
})
}
}
impl<S: FileSource> Entry<S> {
impl<S: FileSource + Clone> Entry<S> {
pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> {
if let EntryContents::Directory(d) = &mut self.contents {
d.filter(filter)?;
}
Ok(())
}
pub async fn read_file_to_vec(&self) -> Result<Vec<u8>, Error> {
match self.as_contents() {
EntryContents::File(f) => Ok(f.to_vec(self.hash).await?),
EntryContents::Directory(_) => Err(Error::new(
eyre!("expected file, found directory"),
ErrorKind::ParseS9pk,
)),
EntryContents::Missing => {
Err(Error::new(eyre!("entry is missing"), ErrorKind::ParseS9pk))
}
pub async fn update_hash(&mut self, only_missing: bool) -> Result<(), Error> {
if let EntryContents::Directory(d) = &mut self.contents {
d.update_hashes(only_missing).await?;
}
self.hash = Some(self.contents.hash().await?);
Ok(())
}
pub async fn to_missing(&self) -> Result<Self, Error> {
let hash = if let Some(hash) = self.hash {
@@ -271,13 +309,6 @@ impl<S: FileSource> Entry<S> {
contents: EntryContents::Missing,
})
}
pub async fn update_hash(&mut self, only_missing: bool) -> Result<(), Error> {
if let EntryContents::Directory(d) = &mut self.contents {
d.update_hashes(only_missing).await?;
}
self.hash = Some(self.contents.hash().await?);
Ok(())
}
#[instrument(skip_all)]
pub async fn serialize_header<W: Sink>(
&self,
@@ -302,6 +333,20 @@ impl<S: FileSource> Entry<S> {
}
}
}
impl<S: FileSource> Entry<S> {
pub async fn read_file_to_vec(&self) -> Result<Vec<u8>, Error> {
match self.as_contents() {
EntryContents::File(f) => Ok(f.to_vec(self.hash).await?),
EntryContents::Directory(_) => Err(Error::new(
eyre!("expected file, found directory"),
ErrorKind::ParseS9pk,
)),
EntryContents::Missing => {
Err(Error::new(eyre!("entry is missing"), ErrorKind::ParseS9pk))
}
}
}
}
#[derive(Debug, Clone)]
pub enum EntryContents<S> {
@@ -329,10 +374,10 @@ impl<S> EntryContents<S> {
matches!(self, &EntryContents::Directory(_))
}
}
impl<S: ArchiveSource> EntryContents<Section<S>> {
impl<S: ArchiveSource + Clone> EntryContents<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
source: S,
header: &mut (impl AsyncRead + Unpin + Send),
(hash, size): (Hash, u64),
) -> Result<Self, Error> {
@@ -346,7 +391,7 @@ impl<S: ArchiveSource> EntryContents<Section<S>> {
FileContents::deserialize(source, header, size).await?,
)),
2 => Ok(Self::Directory(
DirectoryContents::deserialize(source, header, (hash, size)).await?,
DirectoryContents::deserialize(&source, header, (hash, size)).await?,
)),
id => Err(Error::new(
eyre!("Unknown type id {id} found in MerkleArchive"),
@@ -355,7 +400,7 @@ impl<S: ArchiveSource> EntryContents<Section<S>> {
}
}
}
impl<S: FileSource> EntryContents<S> {
impl<S: FileSource + Clone> EntryContents<S> {
pub async fn hash(&self) -> Result<(Hash, u64), Error> {
match self {
Self::Missing => Err(Error::new(
@@ -366,6 +411,15 @@ impl<S: FileSource> EntryContents<S> {
Self::Directory(d) => Ok((d.sighash().await?, d.toc_size())),
}
}
pub fn into_dyn(self) -> EntryContents<DynFileSource> {
match self {
Self::Missing => EntryContents::Missing,
Self::File(f) => EntryContents::File(f.into_dyn()),
Self::Directory(d) => EntryContents::Directory(d.into_dyn()),
}
}
}
impl<S: FileSource> EntryContents<S> {
#[instrument(skip_all)]
pub async fn serialize_header<W: Sink>(
&self,
@@ -381,11 +435,4 @@ impl<S: FileSource> EntryContents<S> {
Self::Directory(d) => Some(d.serialize_header(position, w).await?),
})
}
pub fn into_dyn(self) -> EntryContents<DynFileSource> {
match self {
Self::Missing => EntryContents::Missing,
Self::File(f) => EntryContents::File(f.into_dyn()),
Self::Directory(d) => EntryContents::Directory(d.into_dyn()),
}
}
}

View File

@@ -1,6 +1,7 @@
use tokio::io::{AsyncSeek, AsyncWrite};
use crate::prelude::*;
use crate::util::io::TrackingIO;
#[async_trait::async_trait]
pub trait Sink: AsyncWrite + Unpin + Send {
@@ -17,57 +18,8 @@ impl<S: AsyncWrite + AsyncSeek + Unpin + Send> Sink for S {
}
#[async_trait::async_trait]
impl<W: AsyncWrite + Unpin + Send> Sink for TrackingWriter<W> {
impl<W: AsyncWrite + Unpin + Send> Sink for TrackingIO<W> {
async fn current_position(&mut self) -> Result<u64, Error> {
Ok(self.position)
}
}
#[pin_project::pin_project]
pub struct TrackingWriter<W> {
position: u64,
#[pin]
writer: W,
}
impl<W> TrackingWriter<W> {
pub fn new(start: u64, w: W) -> Self {
Self {
position: start,
writer: w,
}
}
pub fn position(&self) -> u64 {
self.position
}
pub fn into_inner(self) -> W {
self.writer
}
}
impl<W: AsyncWrite + Unpin + Send> AsyncWrite for TrackingWriter<W> {
fn poll_write(
self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &[u8],
) -> std::task::Poll<Result<usize, std::io::Error>> {
let this = self.project();
match this.writer.poll_write(cx, buf) {
std::task::Poll::Ready(Ok(written)) => {
*this.position += written as u64;
std::task::Poll::Ready(Ok(written))
}
a => a,
}
}
fn poll_flush(
self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Result<(), std::io::Error>> {
self.project().writer.poll_flush(cx)
}
fn poll_shutdown(
self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Result<(), std::io::Error>> {
self.project().writer.poll_shutdown(cx)
Ok(self.position())
}
}

View File

@@ -1,23 +1,25 @@
use std::collections::BTreeSet;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::task::Poll;
use bytes::Bytes;
use futures::stream::BoxStream;
use futures::{StreamExt, TryStreamExt};
use futures::{Stream, StreamExt, TryStreamExt};
use reqwest::header::{ACCEPT_RANGES, CONTENT_LENGTH, RANGE};
use reqwest::{Client, Url};
use tokio::io::{AsyncRead, AsyncReadExt, Take};
use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take};
use tokio_util::io::StreamReader;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::TrackingIO;
use crate::util::Apply;
#[derive(Clone)]
pub struct HttpSource {
url: Url,
client: Client,
size: Option<u64>,
range_support: Result<
(),
(), // Arc<Mutex<Option<RangelessReader>>>
>,
range_support: Result<(), Arc<Mutex<BTreeSet<TrackingIO<HttpBodyReader>>>>>,
}
impl HttpSource {
pub async fn new(client: Client, url: Url) -> Result<Self, Error> {
@@ -32,7 +34,8 @@ impl HttpSource {
.headers()
.get(ACCEPT_RANGES)
.and_then(|s| s.to_str().ok())
== Some("bytes");
== Some("bytes")
&& false;
let size = head
.headers()
.get(CONTENT_LENGTH)
@@ -45,53 +48,141 @@ impl HttpSource {
range_support: if range_support {
Ok(())
} else {
Err(()) // Err(Arc::new(Mutex::new(None)))
Err(Arc::new(Mutex::new(BTreeSet::new())))
},
})
}
}
impl ArchiveSource for HttpSource {
type Reader = Take<HttpReader>;
type Reader = HttpReader;
async fn size(&self) -> Option<u64> {
self.size
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
Ok(StreamReader::new(
self.client
.get(self.url.clone())
.send()
.await
.with_kind(ErrorKind::Network)?
.error_for_status()
.with_kind(ErrorKind::Network)?
.bytes_stream()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
.apply(boxed),
))
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
match self.range_support {
Ok(_) => Ok(HttpReader::Range(StreamReader::new(if size > 0 {
self.client
.get(self.url.clone())
.header(RANGE, format!("bytes={}-{}", position, position + size - 1))
.send()
.await
.with_kind(ErrorKind::Network)?
.error_for_status()
.with_kind(ErrorKind::Network)?
.bytes_stream()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
.boxed()
} else {
futures::stream::empty().boxed()
}))
.take(size)),
_ => todo!(),
match &self.range_support {
Ok(_) => Ok(HttpReader::Range(
StreamReader::new(if size > 0 {
self.client
.get(self.url.clone())
.header(RANGE, format!("bytes={}-{}", position, position + size - 1))
.send()
.await
.with_kind(ErrorKind::Network)?
.error_for_status()
.with_kind(ErrorKind::Network)?
.bytes_stream()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
.apply(boxed)
} else {
futures::stream::empty().apply(boxed)
})
.take(size),
)),
Err(pool) => {
fn get_reader_for(
pool: &Arc<Mutex<BTreeSet<TrackingIO<HttpBodyReader>>>>,
position: u64,
) -> Option<TrackingIO<HttpBodyReader>> {
let mut lock = pool.lock().unwrap();
let pos = lock.range(..position).last()?.position();
lock.take(&pos)
}
let reader = get_reader_for(pool, position);
let mut reader = if let Some(reader) = reader {
reader
} else {
TrackingIO::new(
0,
StreamReader::new(
self.client
.get(self.url.clone())
.send()
.await
.with_kind(ErrorKind::Network)?
.error_for_status()
.with_kind(ErrorKind::Network)?
.bytes_stream()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
.apply(boxed),
),
)
};
if reader.position() < position {
let to_skip = position - reader.position();
tokio::io::copy(&mut (&mut reader).take(to_skip), &mut tokio::io::sink())
.await?;
}
Ok(HttpReader::Rangeless {
pool: pool.clone(),
reader: Some(reader.take(size)),
})
}
}
}
}
#[pin_project::pin_project(project = HttpReaderProj)]
type BoxStream<'a, T> = Pin<Box<dyn Stream<Item = T> + Send + Sync + 'a>>;
fn boxed<'a, T>(stream: impl Stream<Item = T> + Send + Sync + 'a) -> BoxStream<'a, T> {
Box::pin(stream)
}
type HttpBodyReader = StreamReader<BoxStream<'static, Result<Bytes, std::io::Error>>, Bytes>;
#[pin_project::pin_project(project = HttpReaderProj, PinnedDrop)]
pub enum HttpReader {
Range(#[pin] StreamReader<BoxStream<'static, Result<Bytes, std::io::Error>>, Bytes>),
// Rangeless(#[pin] RangelessReader),
Range(#[pin] Take<HttpBodyReader>),
Rangeless {
pool: Arc<Mutex<BTreeSet<TrackingIO<HttpBodyReader>>>>,
#[pin]
reader: Option<Take<TrackingIO<HttpBodyReader>>>,
},
}
impl AsyncRead for HttpReader {
fn poll_read(
self: std::pin::Pin<&mut Self>,
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &mut tokio::io::ReadBuf<'_>,
) -> std::task::Poll<std::io::Result<()>> {
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
match self.project() {
HttpReaderProj::Range(r) => r.poll_read(cx, buf),
// HttpReaderProj::Rangeless(r) => r.poll_read(cx, buf),
HttpReaderProj::Rangeless { mut reader, .. } => {
let mut finished = false;
if let Some(reader) = reader.as_mut().as_pin_mut() {
let start = buf.filled().len();
futures::ready!(reader.poll_read(cx, buf)?);
finished = start == buf.filled().len();
}
if finished {
reader.take();
}
Poll::Ready(Ok(()))
}
}
}
}
#[pin_project::pinned_drop]
impl PinnedDrop for HttpReader {
fn drop(self: Pin<&mut Self>) {
match self.project() {
HttpReaderProj::Range(_) => (),
HttpReaderProj::Rangeless { pool, mut reader } => {
if let Some(reader) = reader.take() {
pool.lock().unwrap().insert(reader.into_inner());
}
}
}
}
}

View File

@@ -1,3 +1,4 @@
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
@@ -13,7 +14,7 @@ use crate::s9pk::merkle_archive::hash::VerifyingWriter;
pub mod http;
pub mod multi_cursor_file;
pub trait FileSource: Clone + Send + Sync + Sized + 'static {
pub trait FileSource: Send + Sync + Sized + 'static {
type Reader: AsyncRead + Unpin + Send;
fn size(&self) -> impl Future<Output = Result<u64, Error>> + Send;
fn reader(&self) -> impl Future<Output = Result<Self::Reader, Error>> + Send;
@@ -61,6 +62,29 @@ pub trait FileSource: Clone + Send + Sync + Sized + 'static {
}
}
impl<T: FileSource> FileSource for Arc<T> {
type Reader = T::Reader;
async fn size(&self) -> Result<u64, Error> {
self.deref().size().await
}
async fn reader(&self) -> Result<Self::Reader, Error> {
self.deref().reader().await
}
async fn copy<W: AsyncWrite + Unpin + Send + ?Sized>(&self, w: &mut W) -> Result<(), Error> {
self.deref().copy(w).await
}
async fn copy_verify<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
verify: Option<(Hash, u64)>,
) -> Result<(), Error> {
self.deref().copy_verify(w, verify).await
}
async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result<Vec<u8>, Error> {
self.deref().to_vec(verify).await
}
}
#[derive(Clone)]
pub struct DynFileSource(Arc<dyn DynableFileSource>);
impl DynFileSource {
@@ -155,16 +179,28 @@ impl FileSource for Arc<[u8]> {
}
}
pub trait ArchiveSource: Clone + Send + Sync + Sized + 'static {
pub trait ArchiveSource: Send + Sync + Sized + 'static {
type Reader: AsyncRead + Unpin + Send;
fn size(&self) -> impl Future<Output = Option<u64>> + Send {
async { None }
}
fn fetch_all(
&self,
) -> impl Future<Output = Result<impl AsyncRead + Unpin + Send, Error>> + Send;
fn fetch(
&self,
position: u64,
size: u64,
) -> impl Future<Output = Result<Self::Reader, Error>> + Send;
fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
) -> impl Future<Output = Result<(), Error>> + Send {
async move {
tokio::io::copy(&mut self.fetch_all().await?, w).await?;
Ok(())
}
}
fn copy_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
position: u64,
@@ -176,17 +212,47 @@ pub trait ArchiveSource: Clone + Send + Sync + Sized + 'static {
Ok(())
}
}
fn section(&self, position: u64, size: u64) -> Section<Self> {
fn section(self, position: u64, size: u64) -> Section<Self> {
Section {
source: self.clone(),
source: self,
position,
size,
}
}
}
impl<T: ArchiveSource> ArchiveSource for Arc<T> {
type Reader = T::Reader;
async fn size(&self) -> Option<u64> {
self.deref().size().await
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
self.deref().fetch_all().await
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
self.deref().fetch(position, size).await
}
async fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
) -> Result<(), Error> {
self.deref().copy_all_to(w).await
}
async fn copy_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
position: u64,
size: u64,
w: &mut W,
) -> Result<(), Error> {
self.deref().copy_to(position, size, w).await
}
}
impl ArchiveSource for Arc<[u8]> {
type Reader = tokio::io::Take<std::io::Cursor<Self>>;
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
Ok(std::io::Cursor::new(self.clone()))
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
use tokio::io::AsyncReadExt;

View File

@@ -1,18 +1,35 @@
use std::io::SeekFrom;
use std::os::fd::{AsRawFd, RawFd};
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::Arc;
use std::task::Poll;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncReadExt};
use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take};
use tokio::sync::{Mutex, OwnedMutexGuard};
use crate::disk::mount::filesystem::loop_dev::LoopDev;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::{ArchiveSource, Section};
fn path_from_fd(fd: RawFd) -> PathBuf {
Path::new("/proc/self/fd").join(fd.to_string())
fn path_from_fd(fd: RawFd) -> Result<PathBuf, Error> {
#[cfg(target_os = "linux")]
let path = Path::new("/proc/self/fd").join(fd.to_string());
#[cfg(target_os = "macos")] // here be dragons
let path = unsafe {
let mut buf = [0u8; libc::PATH_MAX as usize];
if libc::fcntl(fd, libc::F_GETPATH, buf.as_mut_ptr().cast::<libc::c_char>()) == -1 {
return Err(std::io::Error::last_os_error().into());
}
Path::new(
&*std::ffi::CStr::from_bytes_until_nul(&buf)
.with_kind(ErrorKind::Utf8)?
.to_string_lossy(),
)
.to_owned()
};
Ok(path)
}
#[derive(Clone)]
@@ -21,18 +38,26 @@ pub struct MultiCursorFile {
file: Arc<Mutex<File>>,
}
impl MultiCursorFile {
fn path(&self) -> PathBuf {
fn path(&self) -> Result<PathBuf, Error> {
path_from_fd(self.fd)
}
pub async fn open(fd: &impl AsRawFd) -> Result<Self, Error> {
let fd = fd.as_raw_fd();
Ok(Self {
fd,
file: Arc::new(Mutex::new(File::open(path_from_fd(fd)).await?)),
})
let f = File::open(path_from_fd(fd.as_raw_fd())?).await?;
Ok(Self::from(f))
}
pub async fn cursor(&self) -> Result<FileCursor, Error> {
Ok(FileCursor(
if let Ok(file) = self.file.clone().try_lock_owned() {
file
} else {
Arc::new(Mutex::new(File::open(self.path()?).await?))
.try_lock_owned()
.expect("freshly created")
},
))
}
pub async fn blake3_mmap(&self) -> Result<blake3::Hash, Error> {
let path = self.path();
let path = self.path()?;
tokio::task::spawn_blocking(move || {
let mut hasher = blake3::Hasher::new();
hasher.update_mmap_rayon(path)?;
@@ -52,76 +77,44 @@ impl From<File> for MultiCursorFile {
}
#[pin_project::pin_project]
pub struct FileSectionReader {
#[pin]
file: OwnedMutexGuard<File>,
remaining: u64,
}
impl AsyncRead for FileSectionReader {
pub struct FileCursor(#[pin] OwnedMutexGuard<File>);
impl AsyncRead for FileCursor {
fn poll_read(
self: std::pin::Pin<&mut Self>,
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &mut tokio::io::ReadBuf<'_>,
) -> std::task::Poll<std::io::Result<()>> {
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
let this = self.project();
if *this.remaining == 0 {
return std::task::Poll::Ready(Ok(()));
}
let before = buf.filled().len() as u64;
let res = std::pin::Pin::new(&mut (&mut **this.file.get_mut()).take(*this.remaining))
.poll_read(cx, buf);
*this.remaining = this
.remaining
.saturating_sub(buf.filled().len() as u64 - before);
res
Pin::new(&mut (&mut **this.0.get_mut())).poll_read(cx, buf)
}
}
impl ArchiveSource for MultiCursorFile {
type Reader = FileSectionReader;
type Reader = Take<FileCursor>;
async fn size(&self) -> Option<u64> {
tokio::fs::metadata(self.path()).await.ok().map(|m| m.len())
tokio::fs::metadata(self.path().ok()?)
.await
.ok()
.map(|m| m.len())
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
use tokio::io::AsyncSeekExt;
let mut file = self.cursor().await?;
file.0.seek(SeekFrom::Start(0)).await?;
Ok(file)
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
use tokio::io::AsyncSeekExt;
let mut file = if let Ok(file) = self.file.clone().try_lock_owned() {
file
} else {
#[cfg(target_os = "linux")]
let file = File::open(self.path()).await?;
#[cfg(target_os = "macos")] // here be dragons
let file = unsafe {
let mut buf = [0u8; libc::PATH_MAX as usize];
if libc::fcntl(
self.fd,
libc::F_GETPATH,
buf.as_mut_ptr().cast::<libc::c_char>(),
) == -1
{
return Err(std::io::Error::last_os_error().into());
}
File::open(
&*std::ffi::CStr::from_bytes_until_nul(&buf)
.with_kind(ErrorKind::Utf8)?
.to_string_lossy(),
)
.await?
};
Arc::new(Mutex::new(file))
.try_lock_owned()
.expect("freshly created")
};
file.seek(SeekFrom::Start(position)).await?;
Ok(Self::Reader {
file,
remaining: size,
})
let mut file = self.cursor().await?;
file.0.seek(SeekFrom::Start(position)).await?;
Ok(file.take(size))
}
}
impl From<&Section<MultiCursorFile>> for LoopDev<PathBuf> {
fn from(value: &Section<MultiCursorFile>) -> Self {
LoopDev::new(value.source.path(), value.position, value.size)
LoopDev::new(value.source.path().unwrap(), value.position, value.size)
}
}

View File

@@ -8,9 +8,9 @@ use ed25519_dalek::SigningKey;
use crate::prelude::*;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::file_contents::FileContents;
use crate::s9pk::merkle_archive::sink::TrackingWriter;
use crate::s9pk::merkle_archive::source::FileSource;
use crate::s9pk::merkle_archive::{Entry, EntryContents, MerkleArchive};
use crate::util::io::TrackingIO;
/// Creates a MerkleArchive (a1) with the provided files at the provided paths. NOTE: later files can overwrite previous files/directories at the same path
/// Tests:
@@ -60,10 +60,15 @@ fn test(files: Vec<(PathBuf, String)>) -> Result<(), Error> {
.block_on(async move {
a1.update_hashes(true).await?;
let mut s1 = Vec::new();
a1.serialize(&mut TrackingWriter::new(0, &mut s1), true)
.await?;
a1.serialize(&mut TrackingIO::new(0, &mut s1), true).await?;
let s1: Arc<[u8]> = s1.into();
let a2 = MerkleArchive::deserialize(&s1, "test", &mut Cursor::new(s1.clone())).await?;
let a2 = MerkleArchive::deserialize(
&s1,
"test",
&mut Cursor::new(s1.clone()),
Some(&a1.commitment().await?),
)
.await?;
for (path, content) in check_set {
match a2
@@ -88,8 +93,7 @@ fn test(files: Vec<(PathBuf, String)>) -> Result<(), Error> {
}
let mut s2 = Vec::new();
a2.serialize(&mut TrackingWriter::new(0, &mut s2), true)
.await?;
a2.serialize(&mut TrackingIO::new(0, &mut s2), true).await?;
let s2: Arc<[u8]> = s2.into();
ensure_code!(s1 == s2, ErrorKind::Pack, "s1 does not match s2");

View File

@@ -30,6 +30,8 @@ impl<'a, S: FileSource> WriteQueue<'a, S> {
self.queue.push_back(entry);
Ok(res)
}
}
impl<'a, S: FileSource + Clone> WriteQueue<'a, S> {
pub async fn serialize<W: Sink>(&mut self, w: &mut W, verify: bool) -> Result<(), Error> {
loop {
let Some(next) = self.queue.pop_front() else {

View File

@@ -1,3 +1,4 @@
pub mod git_hash;
pub mod merkle_archive;
pub mod rpc;
pub mod v1;

View File

@@ -172,14 +172,14 @@ async fn add_image(
.join(&arch)
.join(&id)
.with_extension("env"),
Entry::file(DynFileSource::new(Arc::from(Vec::from(env)))),
Entry::file(DynFileSource::new(Arc::<[u8]>::from(Vec::from(env)))),
)?;
archive.contents_mut().insert_path(
Path::new("images")
.join(&arch)
.join(&id)
.with_extension("json"),
Entry::file(DynFileSource::new(Arc::from(
Entry::file(DynFileSource::new(Arc::<[u8]>::from(
serde_json::to_vec(&serde_json::json!({
"workdir": workdir
}))

View File

@@ -3,35 +3,38 @@ use std::path::{Path, PathBuf};
use emver::VersionRange;
use imbl_value::InOMap;
use indexmap::IndexMap;
pub use models::PackageId;
use models::VolumeId;
use models::{ActionId, HealthCheckId, ImageId, VolumeId};
use serde::{Deserialize, Serialize};
use url::Url;
use super::git_hash::GitHash;
use crate::prelude::*;
use crate::s9pk::git_hash::GitHash;
use crate::s9pk::manifest::{Alerts, Description, HardwareRequirements};
use crate::util::Version;
use crate::util::serde::{Duration, IoFormat};
use crate::util::VersionString;
use crate::version::{Current, VersionT};
fn current_version() -> Version {
fn current_version() -> VersionString {
Current::new().semver().into()
}
#[derive(Clone, Debug, Deserialize, Serialize, HasModel)]
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
#[model = "Model<Self>"]
pub struct Manifest {
#[serde(default = "current_version")]
pub eos_version: Version,
pub eos_version: VersionString,
pub id: PackageId,
#[serde(default)]
pub git_hash: Option<GitHash>,
pub title: String,
pub version: VersionString,
pub description: Description,
#[serde(default)]
pub assets: Assets,
pub title: String,
pub version: Version,
pub description: Description,
#[serde(default)]
pub build: Option<Vec<String>>,
pub release_notes: String,
pub license: String, // type of license
pub wrapper_repo: Url,
@@ -41,10 +44,23 @@ pub struct Manifest {
pub donation_url: Option<Url>,
#[serde(default)]
pub alerts: Alerts,
pub main: PackageProcedure,
pub health_checks: HealthChecks,
pub config: Option<ConfigActions>,
pub properties: Option<PackageProcedure>,
pub volumes: BTreeMap<VolumeId, Value>,
// #[serde(default)]
// pub interfaces: Interfaces,
// #[serde(default)]
pub backup: BackupActions,
#[serde(default)]
pub migrations: Migrations,
#[serde(default)]
pub actions: BTreeMap<ActionId, Action>,
// #[serde(default)]
// pub permissions: Permissions,
#[serde(default)]
pub dependencies: BTreeMap<PackageId, DepInfo>,
pub config: Option<InOMap<String, Value>>,
#[serde(default)]
pub replaces: Vec<String>,
@@ -53,6 +69,123 @@ pub struct Manifest {
pub hardware_requirements: HardwareRequirements,
}
impl Manifest {
pub fn package_procedures(&self) -> impl Iterator<Item = &PackageProcedure> {
use std::iter::once;
let main = once(&self.main);
let cfg_get = self.config.as_ref().map(|a| &a.get).into_iter();
let cfg_set = self.config.as_ref().map(|a| &a.set).into_iter();
let props = self.properties.iter();
let backups = vec![&self.backup.create, &self.backup.restore].into_iter();
let migrations = self
.migrations
.to
.values()
.chain(self.migrations.from.values());
let actions = self.actions.values().map(|a| &a.implementation);
main.chain(cfg_get)
.chain(cfg_set)
.chain(props)
.chain(backups)
.chain(migrations)
.chain(actions)
}
}
#[derive(Clone, Debug, Deserialize, Serialize, HasModel)]
#[serde(rename_all = "kebab-case")]
#[serde(tag = "type")]
#[model = "Model<Self>"]
pub enum PackageProcedure {
Docker(DockerProcedure),
Script(Value),
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct DockerProcedure {
pub image: ImageId,
#[serde(default)]
pub system: bool,
pub entrypoint: String,
#[serde(default)]
pub args: Vec<String>,
#[serde(default)]
pub inject: bool,
#[serde(default)]
pub mounts: BTreeMap<VolumeId, PathBuf>,
#[serde(default)]
pub io_format: Option<IoFormat>,
#[serde(default)]
pub sigterm_timeout: Option<Duration>,
#[serde(default)]
pub shm_size_mb: Option<usize>, // TODO: use postfix sizing? like 1k vs 1m vs 1g
#[serde(default)]
pub gpu_acceleration: bool,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct HealthChecks(pub BTreeMap<HealthCheckId, HealthCheck>);
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct HealthCheck {
pub name: String,
pub success_message: Option<String>,
#[serde(flatten)]
implementation: PackageProcedure,
pub timeout: Option<Duration>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ConfigActions {
pub get: PackageProcedure,
pub set: PackageProcedure,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct BackupActions {
pub create: PackageProcedure,
pub restore: PackageProcedure,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Migrations {
pub from: IndexMap<VersionRange, PackageProcedure>,
pub to: IndexMap<VersionRange, PackageProcedure>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Action {
pub name: String,
pub description: String,
#[serde(default)]
pub warning: Option<String>,
pub implementation: PackageProcedure,
// pub allowed_statuses: Vec<DockerStatus>,
// #[serde(default)]
// pub input_spec: ConfigSpec,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct DepInfo {
pub version: VersionRange,
pub requirement: DependencyRequirement,
pub description: Option<String>,
#[serde(default)]
pub config: Option<DependencyConfig>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct DependencyConfig {
check: PackageProcedure,
auto_configure: PackageProcedure,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
#[serde(tag = "type")]
@@ -67,15 +200,6 @@ impl DependencyRequirement {
}
}
#[derive(Clone, Debug, Deserialize, Serialize, HasModel)]
#[serde(rename_all = "kebab-case")]
#[model = "Model<Self>"]
pub struct DepInfo {
pub version: VersionRange,
pub requirement: DependencyRequirement,
pub description: Option<String>,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Assets {

View File

@@ -6,7 +6,6 @@ use ts_rs::TS;
pub mod builder;
pub mod docker;
pub mod git_hash;
pub mod header;
pub mod manifest;
pub mod reader;

View File

@@ -20,7 +20,7 @@ use super::header::{FileSection, Header, TableOfContents};
use super::SIG_CONTEXT;
use crate::prelude::*;
use crate::s9pk::v1::docker::DockerReader;
use crate::util::Version;
use crate::util::VersionString;
#[pin_project::pin_project]
#[derive(Debug)]
@@ -83,11 +83,11 @@ impl<'a, R: AsyncSeek + Unpin> AsyncSeek for ReadHandle<'a, R> {
pub struct ImageTag {
pub package_id: PackageId,
pub image_id: ImageId,
pub version: Version,
pub version: VersionString,
}
impl ImageTag {
#[instrument(skip_all)]
pub fn validate(&self, id: &PackageId, version: &Version) -> Result<(), Error> {
pub fn validate(&self, id: &PackageId, version: &VersionString) -> Result<(), Error> {
if id != &self.package_id {
return Err(Error::new(
eyre!(

View File

@@ -1,9 +1,10 @@
use std::collections::BTreeSet;
use std::collections::{BTreeMap, BTreeSet};
use std::io::Cursor;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use itertools::Itertools;
use models::ImageId;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncSeek, AsyncWriteExt};
use tokio::process::Command;
@@ -16,7 +17,7 @@ use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{FileSource, Section};
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::rpc::SKIP_ENV;
use crate::s9pk::v1::manifest::Manifest as ManifestV1;
use crate::s9pk::v1::manifest::{Manifest as ManifestV1, PackageProcedure};
use crate::s9pk::v1::reader::S9pkReader;
use crate::s9pk::v2::{S9pk, SIG_CONTEXT};
use crate::util::io::TmpDir;
@@ -72,6 +73,17 @@ impl S9pk<Section<MultiCursorFile>> {
let manifest = from_value::<ManifestV1>(manifest_raw.clone())?;
let mut new_manifest = Manifest::from(manifest.clone());
let images: BTreeMap<ImageId, bool> = manifest
.package_procedures()
.filter_map(|p| {
if let PackageProcedure::Docker(p) = p {
Some((p.image.clone(), p.system))
} else {
None
}
})
.collect();
// LICENSE.md
let license: Arc<[u8]> = reader.license().await?.to_vec().await?.into();
archive.insert_path(
@@ -109,61 +121,14 @@ impl S9pk<Section<MultiCursorFile>> {
.input(Some(&mut reader.docker_images(&arch).await?))
.invoke(ErrorKind::Docker)
.await?;
#[derive(serde::Deserialize)]
#[serde(rename_all = "PascalCase")]
struct DockerImagesOut {
repository: Option<String>,
tag: Option<String>,
#[serde(default)]
names: Vec<String>,
}
for image in {
#[cfg(feature = "docker")]
let images = std::str::from_utf8(
&Command::new(CONTAINER_TOOL)
.arg("images")
.arg("--format=json")
.invoke(ErrorKind::Docker)
.await?,
)?
.lines()
.map(|l| serde_json::from_str::<DockerImagesOut>(l))
.collect::<Result<Vec<_>, _>>()
.with_kind(ErrorKind::Deserialization)?
.into_iter();
#[cfg(not(feature = "docker"))]
let images = serde_json::from_slice::<Vec<DockerImagesOut>>(
&Command::new(CONTAINER_TOOL)
.arg("images")
.arg("--format=json")
.invoke(ErrorKind::Docker)
.await?,
)
.with_kind(ErrorKind::Deserialization)?
.into_iter();
images
}
.flat_map(|i| {
if let (Some(repository), Some(tag)) = (i.repository, i.tag) {
vec![format!("{repository}:{tag}")]
for (image, system) in &images {
new_manifest.images.insert(image.clone());
let sqfs_path = images_dir.join(image).with_extension("squashfs");
let image_name = if *system {
format!("start9/{}:latest", image)
} else {
i.names
.into_iter()
.filter_map(|i| i.strip_prefix("docker.io/").map(|s| s.to_owned()))
.collect()
}
})
.filter_map(|i| {
i.strip_suffix(&format!(":{}", manifest.version))
.map(|s| s.to_owned())
})
.filter_map(|i| {
i.strip_prefix(&format!("start9/{}/", manifest.id))
.map(|s| s.to_owned())
}) {
new_manifest.images.insert(image.parse()?);
let sqfs_path = images_dir.join(&image).with_extension("squashfs");
let image_name = format!("start9/{}/{}:{}", manifest.id, image, manifest.version);
format!("start9/{}/{}:{}", manifest.id, image, manifest.version)
};
let id = String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("create")
@@ -323,6 +288,7 @@ impl S9pk<Section<MultiCursorFile>> {
Ok(S9pk::deserialize(
&MultiCursorFile::from(File::open(destination.as_ref()).await?),
None,
false,
)
.await?)
@@ -337,8 +303,7 @@ impl From<ManifestV1> for Manifest {
title: value.title,
version: value.version,
release_notes: value.release_notes,
license: value.license,
replaces: value.replaces,
license: value.license.into(),
wrapper_repo: value.wrapper_repo,
upstream_repo: value.upstream_repo,
support_site: value.support_site.unwrap_or_else(|| default_url.clone()),

View File

@@ -2,6 +2,7 @@ use std::collections::{BTreeMap, BTreeSet};
use color_eyre::eyre::eyre;
use helpers::const_true;
use imbl_value::InternedString;
pub use models::PackageId;
use models::{ImageId, VolumeId};
use serde::{Deserialize, Serialize};
@@ -10,12 +11,12 @@ use url::Url;
use crate::dependencies::Dependencies;
use crate::prelude::*;
use crate::s9pk::v1::git_hash::GitHash;
use crate::s9pk::git_hash::GitHash;
use crate::util::serde::Regex;
use crate::util::Version;
use crate::util::VersionString;
use crate::version::{Current, VersionT};
fn current_version() -> Version {
fn current_version() -> VersionString {
Current::new().semver().into()
}
@@ -26,12 +27,10 @@ fn current_version() -> Version {
pub struct Manifest {
pub id: PackageId,
pub title: String,
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
pub release_notes: String,
pub license: String, // type of license
#[serde(default)]
pub replaces: Vec<String>,
#[ts(type = "string")]
pub license: InternedString, // type of license
#[ts(type = "string")]
pub wrapper_repo: Url,
#[ts(type = "string")]
@@ -56,8 +55,7 @@ pub struct Manifest {
#[ts(type = "string | null")]
pub git_hash: Option<GitHash>,
#[serde(default = "current_version")]
#[ts(type = "string")]
pub os_version: Version,
pub os_version: VersionString,
#[serde(default = "const_true")]
pub has_config: bool,
}
@@ -68,9 +66,11 @@ pub struct Manifest {
pub struct HardwareRequirements {
#[serde(default)]
#[ts(type = "{ [key: string]: string }")]
device: BTreeMap<String, Regex>,
ram: Option<u64>,
pub arch: Option<Vec<String>>,
pub device: BTreeMap<String, Regex>,
#[ts(type = "number | null")]
pub ram: Option<u64>,
#[ts(type = "string[] | null")]
pub arch: Option<BTreeSet<InternedString>>,
}
#[derive(Clone, Debug, Deserialize, Serialize, TS)]

View File

@@ -7,6 +7,7 @@ use models::{mime, DataUrl, PackageId};
use tokio::fs::File;
use crate::prelude::*;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::file_contents::FileContents;
use crate::s9pk::merkle_archive::sink::Sink;
@@ -96,7 +97,7 @@ impl<S> S9pk<S> {
}
}
impl<S: FileSource> S9pk<S> {
impl<S: FileSource + Clone> S9pk<S> {
pub async fn new(archive: MerkleArchive<S>, size: Option<u64>) -> Result<Self, Error> {
let manifest = extract_manifest(&archive).await?;
Ok(Self {
@@ -173,9 +174,13 @@ impl<S: FileSource> S9pk<S> {
}
}
impl<S: ArchiveSource> S9pk<Section<S>> {
impl<S: ArchiveSource + Clone> S9pk<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(source: &S, apply_filter: bool) -> Result<Self, Error> {
pub async fn deserialize(
source: &S,
commitment: Option<&MerkleArchiveCommitment>,
apply_filter: bool,
) -> Result<Self, Error> {
use tokio::io::AsyncReadExt;
let mut header = source
@@ -193,7 +198,8 @@ impl<S: ArchiveSource> S9pk<Section<S>> {
"Invalid Magic or Unexpected Version"
);
let mut archive = MerkleArchive::deserialize(source, SIG_CONTEXT, &mut header).await?;
let mut archive =
MerkleArchive::deserialize(source, SIG_CONTEXT, &mut header, commitment).await?;
if apply_filter {
archive.filter(filter)?;
@@ -211,7 +217,7 @@ impl<S: ArchiveSource> S9pk<Section<S>> {
}
impl S9pk {
pub async fn from_file(file: File, apply_filter: bool) -> Result<Self, Error> {
Self::deserialize(&MultiCursorFile::from(file), apply_filter).await
Self::deserialize(&MultiCursorFile::from(file), None, apply_filter).await
}
pub async fn open(
path: impl AsRef<Path>,

View File

@@ -22,7 +22,7 @@ use crate::install::PKG_ARCHIVE_DIR;
use crate::lxc::ContainerId;
use crate::prelude::*;
use crate::progress::{NamedProgress, Progress};
use crate::rpc_continuations::RequestGuid;
use crate::rpc_continuations::Guid;
use crate::s9pk::S9pk;
use crate::service::service_map::InstallProgressHandles;
use crate::service::transition::TransitionKind;
@@ -254,7 +254,7 @@ impl Service {
pub async fn install(
ctx: RpcContext,
s9pk: S9pk,
src_version: Option<models::Version>,
src_version: Option<models::VersionString>,
progress: Option<InstallProgressHandles>,
) -> Result<Self, Error> {
let manifest = s9pk.as_manifest().clone();
@@ -339,7 +339,7 @@ impl Service {
Ok(())
}
pub async fn uninstall(self, target_version: Option<models::Version>) -> Result<(), Error> {
pub async fn uninstall(self, target_version: Option<models::VersionString>) -> Result<(), Error> {
self.seed
.persistent_container
.execute(ProcedureName::Uninit, to_value(&target_version)?, None) // TODO timeout
@@ -513,7 +513,7 @@ pub struct ConnectParams {
pub async fn connect_rpc(
ctx: RpcContext,
ConnectParams { id }: ConnectParams,
) -> Result<RequestGuid, Error> {
) -> Result<Guid, Error> {
let id_ref = &id;
crate::lxc::connect(
&ctx,

View File

@@ -1336,12 +1336,15 @@ async fn set_dependencies(
};
let (icon, title) = match async {
let remote_s9pk = S9pk::deserialize(
&HttpSource::new(
ctx.ctx.client.clone(),
registry_url
.join(&format!("package/v2/{}.s9pk?spec={}", dep_id, version_spec))?,
)
.await?,
&Arc::new(
HttpSource::new(
ctx.ctx.client.clone(),
registry_url
.join(&format!("package/v2/{}.s9pk?spec={}", dep_id, version_spec))?,
)
.await?,
),
None, // TODO
true,
)
.await?;

View File

@@ -89,7 +89,7 @@ impl ServiceMap {
}
#[instrument(skip_all)]
pub async fn install<S: FileSource>(
pub async fn install<S: FileSource + Clone>(
&self,
ctx: RpcContext,
mut s9pk: S9pk<S>,

View File

@@ -26,8 +26,10 @@ use crate::progress::{
use crate::registry::asset::RegistryAsset;
use crate::registry::context::{RegistryContext, RegistryUrlParams};
use crate::registry::os::index::OsVersionInfo;
use crate::registry::signer::FileValidator;
use crate::rpc_continuations::{RequestGuid, RpcContinuation};
use crate::registry::os::SIG_CONTEXT;
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::registry::signer::commitment::Commitment;
use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::sound::{
CIRCLE_OF_5THS_SHORT, UPDATE_FAILED_1, UPDATE_FAILED_2, UPDATE_FAILED_3, UPDATE_FAILED_4,
@@ -54,7 +56,7 @@ pub struct UpdateSystemRes {
#[ts(type = "string | null")]
target: Option<Version>,
#[ts(type = "string | null")]
progress: Option<RequestGuid>,
progress: Option<Guid>,
}
/// An user/ daemon would call this to update the system to the latest version and do the updates available,
@@ -83,7 +85,7 @@ pub async fn update_system(
let target =
maybe_do_update(ctx.clone(), registry, target.unwrap_or(VersionRange::Any)).await?;
let progress = if progress && target.is_some() {
let guid = RequestGuid::new();
let guid = Guid::new();
ctx.clone()
.rpc_continuations
.add(
@@ -246,12 +248,12 @@ async fn maybe_do_update(
));
}
let validator = asset.validate(asset.signature_info.all_signers())?;
asset.validate(SIG_CONTEXT, asset.all_signers())?;
let mut progress = FullProgressTracker::new();
let progress_handle = progress.handle();
let mut download_phase = progress_handle.add_phase("Downloading File".into(), Some(100));
download_phase.set_total(validator.size()?);
download_phase.set_total(asset.commitment.size);
let reverify_phase = progress_handle.add_phase("Reverifying File".into(), Some(10));
let sync_boot_phase = progress_handle.add_phase("Syncing Boot Files".into(), Some(1));
let finalize_phase = progress_handle.add_phase("Finalizing Update".into(), Some(1));
@@ -300,7 +302,6 @@ async fn maybe_do_update(
tokio::spawn(async move {
let res = do_update(
ctx.clone(),
validator,
asset,
UpdateProgressHandles {
progress_handle,
@@ -382,8 +383,7 @@ struct UpdateProgressHandles {
#[instrument(skip_all)]
async fn do_update(
ctx: RpcContext,
validator: FileValidator,
asset: RegistryAsset,
asset: RegistryAsset<Blake3Commitment>,
UpdateProgressHandles {
progress_handle,
mut download_phase,
@@ -394,21 +394,23 @@ async fn do_update(
) -> Result<(), Error> {
download_phase.start();
let path = Path::new("/media/startos/images")
.join(hex::encode(&validator.blake3()?.as_bytes()[..16]))
.join(hex::encode(&asset.commitment.hash[..16]))
.with_extension("rootfs");
let mut dst = AtomicFile::new(&path, None::<&Path>)
.await
.with_kind(ErrorKind::Filesystem)?;
let mut download_writer = download_phase.writer(&mut *dst);
asset
.download(ctx.client.clone(), &mut download_writer, &validator)
.download(ctx.client.clone(), &mut download_writer)
.await?;
let (_, mut download_phase) = download_writer.into_inner();
dst.sync_all().await?;
download_phase.complete();
reverify_phase.start();
validator
.validate_file(&MultiCursorFile::open(&*dst).await?)
asset
.commitment
.check(&MultiCursorFile::open(&*dst).await?)
.await?;
dst.save().await.with_kind(ErrorKind::Filesystem)?;
reverify_phase.complete();

View File

@@ -9,18 +9,18 @@ use futures::{FutureExt, StreamExt};
use http::header::CONTENT_LENGTH;
use http::StatusCode;
use tokio::fs::File;
use tokio::io::{AsyncWrite, AsyncWriteExt};
use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
use tokio::sync::watch;
use crate::context::RpcContext;
use crate::prelude::*;
use crate::rpc_continuations::{RequestGuid, RpcContinuation};
use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::TmpDir;
pub async fn upload(ctx: &RpcContext) -> Result<(RequestGuid, UploadingFile), Error> {
let guid = RequestGuid::new();
pub async fn upload(ctx: &RpcContext) -> Result<(Guid, UploadingFile), Error> {
let guid = Guid::new();
let (mut handle, file) = UploadingFile::new().await?;
ctx.rpc_continuations
.add(
@@ -120,22 +120,44 @@ impl Progress {
.and_then(|a| a.expected_size)
}
async fn ready_for(watch: &mut watch::Receiver<Self>, size: u64) -> Result<(), Error> {
if let Some(e) = watch
.wait_for(|progress| progress.error.is_some() || progress.written >= size)
match &*watch
.wait_for(|progress| {
progress.error.is_some()
|| progress.written >= size
|| progress.expected_size.map_or(false, |e| e < size)
})
.await
.map_err(|_| {
Error::new(
eyre!("failed to determine upload progress"),
ErrorKind::Network,
)
})?
.error
.as_ref()
.map(|e| e.clone_output())
{
Err(e)
} else {
Ok(())
})? {
Progress { error: Some(e), .. } => Err(e.clone_output()),
Progress {
expected_size: Some(e),
..
} if *e < size => Err(Error::new(
eyre!("file size is less than requested"),
ErrorKind::Network,
)),
_ => Ok(()),
}
}
async fn ready(watch: &mut watch::Receiver<Self>) -> Result<(), Error> {
match &*watch
.wait_for(|progress| {
progress.error.is_some() || Some(progress.written) == progress.expected_size
})
.await
.map_err(|_| {
Error::new(
eyre!("failed to determine upload progress"),
ErrorKind::Network,
)
})? {
Progress { error: Some(e), .. } => Err(e.clone_output()),
_ => Ok(()),
}
}
fn complete(&mut self) -> bool {
@@ -156,13 +178,25 @@ impl Progress {
));
true
}
Self { error, .. } if error.is_none() => {
Self {
error,
expected_size: Some(_),
..
} if error.is_none() => {
*error = Some(Error::new(
eyre!("Connection closed or timed out before full file received"),
ErrorKind::Network,
));
true
}
Self {
expected_size,
written,
..
} if expected_size.is_none() => {
*expected_size = Some(*written);
true
}
_ => false,
}
}
@@ -204,6 +238,10 @@ impl ArchiveSource for UploadingFile {
async fn size(&self) -> Option<u64> {
Progress::expected_size(&mut self.progress.clone()).await
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
Progress::ready(&mut self.progress.clone()).await?;
self.file.fetch_all().await
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
Progress::ready_for(&mut self.progress.clone(), position + size).await?;
self.file.fetch(position, size).await

View File

@@ -1,4 +1,4 @@
use std::collections::VecDeque;
use std::collections::{BTreeSet, VecDeque};
use std::future::Future;
use std::io::Cursor;
use std::os::unix::prelude::MetadataExt;
@@ -19,7 +19,7 @@ use tokio::io::{
duplex, AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, DuplexStream, ReadBuf, WriteHalf,
};
use tokio::net::TcpStream;
use tokio::sync::Notify;
use tokio::sync::{Notify, OwnedMutexGuard};
use tokio::time::{Instant, Sleep};
use crate::prelude::*;
@@ -804,7 +804,7 @@ pub struct TeeWriter<W1, W2> {
#[pin]
writer2: W2,
}
impl<W1: AsyncWrite, W2: AsyncWrite> TeeWriter<W1, W2> {
impl<W1, W2> TeeWriter<W1, W2> {
pub fn new(writer1: W1, writer2: W2, capacity: usize) -> Self {
Self {
capacity,
@@ -815,7 +815,6 @@ impl<W1: AsyncWrite, W2: AsyncWrite> TeeWriter<W1, W2> {
}
}
}
impl<W1: AsyncWrite + Unpin, W2: AsyncWrite + Unpin> TeeWriter<W1, W2> {
pub async fn into_inner(mut self) -> Result<(W1, W2), Error> {
self.flush().await?;
@@ -1007,3 +1006,114 @@ impl AsyncWrite for ParallelBlake3Writer {
Poll::Pending
}
}
#[pin_project::pin_project]
pub struct TrackingIO<T> {
position: u64,
#[pin]
io: T,
}
impl<T> TrackingIO<T> {
pub fn new(start: u64, io: T) -> Self {
Self {
position: start,
io,
}
}
pub fn position(&self) -> u64 {
self.position
}
pub fn into_inner(self) -> T {
self.io
}
}
impl<W: AsyncWrite> AsyncWrite for TrackingIO<W> {
fn poll_write(
self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &[u8],
) -> std::task::Poll<Result<usize, std::io::Error>> {
let this = self.project();
let written = futures::ready!(this.io.poll_write(cx, buf)?);
*this.position += written as u64;
Poll::Ready(Ok(written))
}
fn poll_flush(
self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Result<(), std::io::Error>> {
self.project().io.poll_flush(cx)
}
fn poll_shutdown(
self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Result<(), std::io::Error>> {
self.project().io.poll_shutdown(cx)
}
}
impl<R: AsyncRead> AsyncRead for TrackingIO<R> {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
let this = self.project();
let start = buf.filled().len();
futures::ready!(this.io.poll_read(cx, buf)?);
*this.position += (buf.filled().len() - start) as u64;
Poll::Ready(Ok(()))
}
}
impl<T> std::cmp::PartialEq for TrackingIO<T> {
fn eq(&self, other: &Self) -> bool {
self.position.eq(&other.position)
}
}
impl<T> std::cmp::Eq for TrackingIO<T> {}
impl<T> std::cmp::PartialOrd for TrackingIO<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.position.partial_cmp(&other.position)
}
}
impl<T> std::cmp::Ord for TrackingIO<T> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.position.cmp(&other.position)
}
}
impl<T> std::borrow::Borrow<u64> for TrackingIO<T> {
fn borrow(&self) -> &u64 {
&self.position
}
}
pub struct MutexIO<T>(OwnedMutexGuard<T>);
impl<R: AsyncRead + Unpin> AsyncRead for MutexIO<R> {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
Pin::new(&mut *self.get_mut().0).poll_read(cx, buf)
}
}
impl<W: AsyncWrite + Unpin> AsyncWrite for MutexIO<W> {
fn poll_write(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &[u8],
) -> Poll<Result<usize, std::io::Error>> {
Pin::new(&mut *self.get_mut().0).poll_write(cx, buf)
}
fn poll_flush(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Result<(), std::io::Error>> {
Pin::new(&mut *self.get_mut().0).poll_flush(cx)
}
fn poll_shutdown(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Result<(), std::io::Error>> {
Pin::new(&mut *self.get_mut().0).poll_shutdown(cx)
}
}

View File

@@ -1,12 +1,13 @@
use models::{Error, ResultExt};
use serde::{Deserialize, Serialize};
use tokio::process::Command;
use ts_rs::TS;
use crate::util::Invoke;
const KNOWN_CLASSES: &[&str] = &["processor", "display"];
#[derive(Debug, Deserialize, Serialize)]
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(tag = "class")]
#[serde(rename_all = "camelCase")]
pub enum LshwDevice {
@@ -28,12 +29,12 @@ impl LshwDevice {
}
}
#[derive(Debug, Deserialize, Serialize)]
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
pub struct LshwProcessor {
pub product: String,
}
#[derive(Debug, Deserialize, Serialize)]
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
pub struct LshwDisplay {
pub product: String,
}

View File

@@ -15,7 +15,7 @@ use helpers::canonicalize;
pub use helpers::NonDetachingJoinHandle;
use imbl_value::InternedString;
use lazy_static::lazy_static;
pub use models::Version;
pub use models::VersionString;
use pin_project::pin_project;
use sha2::Digest;
use tokio::fs::File;

View File

@@ -1,3 +1,4 @@
use std::path::Path;
use clap::Parser;
use rpc_toolkit::{from_fn_async, Context, ParentHandler};
@@ -9,9 +10,11 @@ use crate::context::CliContext;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource};
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::ParallelBlake3Writer;
use crate::util::serde::Base16;
use crate::util::Apply;
use crate::CAP_10_MiB;
pub fn util<C: Context>() -> ParentHandler<C> {
ParentHandler::new().subcommand("b3sum", from_fn_async(b3sum))
@@ -28,26 +31,29 @@ pub async fn b3sum(
ctx: CliContext,
B3sumParams { file, allow_mmap }: B3sumParams,
) -> Result<Base16<[u8; 32]>, Error> {
let source = if let Ok(url) = file.parse::<Url>() {
async fn b3sum_source<S: ArchiveSource>(source: S) -> Result<Base16<[u8; 32]>, Error> {
let mut hasher = ParallelBlake3Writer::new(CAP_10_MiB);
source.copy_all_to(&mut hasher).await?;
hasher.finalize().await.map(|h| *h.as_bytes()).map(Base16)
}
async fn b3sum_file(
path: impl AsRef<Path>,
allow_mmap: bool,
) -> Result<Base16<[u8; 32]>, Error> {
let file = MultiCursorFile::from(File::open(path).await?);
if allow_mmap {
return file.blake3_mmap().await.map(|h| *h.as_bytes()).map(Base16);
}
b3sum_source(file).await
}
if let Ok(url) = file.parse::<Url>() {
if url.scheme() == "file" {
let file = MultiCursorFile::from(File::open(url.path()).await?);
if allow_mmap {
return file.blake3_mmap().await.map(|h| *h.as_bytes()).map(Base16);
}
DynFileSource::new(file.section(
0,
file.size().await.ok_or_else(|| {
Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem)
})?,
))
b3sum_file(url.path(), allow_mmap).await
} else if url.scheme() == "http" || url.scheme() == "https" {
let file = HttpSource::new(ctx.client.clone(), url).await?;
DynFileSource::new(file.section(
0,
file.size().await.ok_or_else(|| {
Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem)
})?,
))
HttpSource::new(ctx.client.clone(), url)
.await?
.apply(b3sum_source)
.await
} else {
return Err(Error::new(
eyre!("unknown scheme: {}", url.scheme()),
@@ -55,18 +61,6 @@ pub async fn b3sum(
));
}
} else {
let file = MultiCursorFile::from(File::open(file).await?);
if allow_mmap {
return file.blake3_mmap().await.map(|h| *h.as_bytes()).map(Base16);
}
DynFileSource::new(file.section(
0,
file.size().await.ok_or_else(|| {
Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem)
})?,
))
};
let mut hasher = ParallelBlake3Writer::new(crate::s9pk::merkle_archive::hash::BUFFER_CAPACITY);
source.copy(&mut hasher).await?;
hasher.finalize().await.map(|h| *h.as_bytes()).map(Base16)
b3sum_file(file, allow_mmap).await
}
}

View File

@@ -22,6 +22,7 @@ use ts_rs::TS;
use super::IntoDoubleEndedIterator;
use crate::prelude::*;
use crate::util::Apply;
use crate::util::clap::FromStrParser;
pub fn deserialize_from_str<
@@ -999,6 +1000,11 @@ impl<T: AsRef<[u8]>> std::fmt::Display for Base16<T> {
#[derive(TS)]
#[ts(type = "string", concrete(T = Vec<u8>))]
pub struct Base32<T>(pub T);
impl<T: AsRef<[u8]>> std::fmt::Display for Base32<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
base32::encode(base32::Alphabet::RFC4648 { padding: true }, self.0.as_ref()).fmt(f)
}
}
impl<'de, T: TryFrom<Vec<u8>>> Deserialize<'de> for Base32<T> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
@@ -1022,32 +1028,35 @@ impl<T: AsRef<[u8]>> Serialize for Base32<T> {
where
S: Serializer,
{
serializer.serialize_str(&base32::encode(
base32::Alphabet::RFC4648 { padding: true },
self.0.as_ref(),
))
}
}
impl<T: AsRef<[u8]>> std::fmt::Display for Base32<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
base32::encode(base32::Alphabet::RFC4648 { padding: true }, self.0.as_ref()).fmt(f)
serialize_display(self, serializer)
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, TS)]
#[ts(type = "string", concrete(T = Vec<u8>))]
pub struct Base64<T>(pub T);
impl<T: AsRef<[u8]>> std::fmt::Display for Base64<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&base64::encode(self.0.as_ref()))
}
}
impl<T: TryFrom<Vec<u8>>> FromStr for Base64<T>
{
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
base64::decode(&s)
.with_kind(ErrorKind::Deserialization)?
.apply(TryFrom::try_from)
.map(Self)
.map_err(|_| Error::new(eyre!("failed to create from buffer"), ErrorKind::Deserialization))
}
}
impl<'de, T: TryFrom<Vec<u8>>> Deserialize<'de> for Base64<T> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
base64::decode(&s)
.map_err(serde::de::Error::custom)?
.try_into()
.map_err(|_| serde::de::Error::custom("invalid length"))
.map(Self)
deserialize_from_str(deserializer)
}
}
impl<T: AsRef<[u8]>> Serialize for Base64<T> {
@@ -1055,7 +1064,7 @@ impl<T: AsRef<[u8]>> Serialize for Base64<T> {
where
S: Serializer,
{
serializer.serialize_str(&base64::encode(self.0.as_ref()))
serialize_display(self, serializer)
}
}
impl<T> Deref for Base64<T> {

View File

@@ -28,7 +28,7 @@ enum Version {
}
impl Version {
fn from_util_version(version: crate::util::Version) -> Self {
fn from_util_version(version: crate::util::VersionString) -> Self {
serde_json::to_value(version.clone())
.and_then(serde_json::from_value)
.unwrap_or_else(|_e| {
@@ -161,7 +161,7 @@ where
T: VersionT,
{
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let v = crate::util::Version::deserialize(deserializer)?;
let v = crate::util::VersionString::deserialize(deserializer)?;
let version = T::new();
if *v < version.semver() {
Ok(Self(version, v.into_version()))
@@ -186,7 +186,7 @@ where
T: VersionT,
{
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let v = crate::util::Version::deserialize(deserializer)?;
let v = crate::util::VersionString::deserialize(deserializer)?;
let version = T::new();
if *v == version.semver() {
Ok(Wrapper(version))

View File

@@ -6,7 +6,7 @@ use models::{HostId, PackageId};
use crate::net::PACKAGE_CERT_PATH;
use crate::prelude::*;
use crate::util::Version;
use crate::util::VersionString;
pub const PKG_VOLUME_DIR: &str = "package-data/volumes";
pub const BACKUP_DIR: &str = "/media/startos/backups";
@@ -20,7 +20,7 @@ pub fn data_dir<P: AsRef<Path>>(datadir: P, pkg_id: &PackageId, volume_id: &Volu
.join(volume_id)
}
pub fn asset_dir<P: AsRef<Path>>(datadir: P, pkg_id: &PackageId, version: &Version) -> PathBuf {
pub fn asset_dir<P: AsRef<Path>>(datadir: P, pkg_id: &PackageId, version: &VersionString) -> PathBuf {
datadir
.as_ref()
.join(PKG_VOLUME_DIR)

View File

@@ -1,7 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SignerKey } from "./SignerKey"
import type { AnyVerifyingKey } from "./AnyVerifyingKey"
export type AcceptSigners =
| { signer: SignerKey }
| { signer: AnyVerifyingKey }
| { any: Array<AcceptSigners> }
| { all: Array<AcceptSigners> }

View File

@@ -1,3 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Guid } from "./Guid"
export type AddAdminParams = { signer: string }
export type AddAdminParams = { signer: Guid }

View File

@@ -1,12 +1,13 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Signature } from "./Signature"
import type { SignerKey } from "./SignerKey"
import type { AnySignature } from "./AnySignature"
import type { Blake3Commitment } from "./Blake3Commitment"
import type { Version } from "./Version"
export type AddAssetParams = {
url: string
signature: Signature
version: string
version: Version
platform: string
upload: boolean
__auth_signer: SignerKey
url: string
signature: AnySignature
commitment: Blake3Commitment
}

View File

@@ -1,7 +1,8 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Version } from "./Version"
export type AddVersionParams = {
version: string
version: Version
headline: string
releaseNotes: string
sourceVersion: string

View File

@@ -1,4 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Pem } from "./Pem"
export type SignerKey = { alg: "ed25519"; pubkey: Pem }
export type AnySignature = string

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AnySigningKey = string

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AnyVerifyingKey = string

View File

@@ -0,0 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Base64 } from "./Base64"
export type Blake3Commitment = { hash: Base64; size: number }

View File

@@ -0,0 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Description } from "./Description"
export type Category = { name: string; description: Description }

View File

@@ -1,5 +1,13 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DataUrl } from "./DataUrl"
import type { Guid } from "./Guid"
import type { OsIndex } from "./OsIndex"
import type { PackageIndex } from "./PackageIndex"
import type { SignerInfo } from "./SignerInfo"
export type FullIndex = { os: OsIndex; signers: { [key: string]: SignerInfo } }
export type FullIndex = {
icon: DataUrl | null
package: PackageIndex
os: OsIndex
signers: { [key: Guid]: SignerInfo }
}

View File

@@ -1,3 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Version } from "./Version"
export type GetOsAssetParams = { version: string; platform: string }
export type GetOsAssetParams = { version: Version; platform: string }

View File

@@ -0,0 +1,10 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PackageDetailLevel } from "./PackageDetailLevel"
import type { PackageId } from "./PackageId"
export type GetPackageParams = {
id: PackageId | null
version: string | null
sourceVersion: string | null
otherVersions: PackageDetailLevel | null
}

View File

@@ -0,0 +1,9 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PackageInfoShort } from "./PackageInfoShort"
import type { PackageVersionInfo } from "./PackageVersionInfo"
import type { Version } from "./Version"
export type GetPackageResponse = {
best: { [key: Version]: PackageVersionInfo }
otherVersions?: { [key: Version]: PackageInfoShort }
}

View File

@@ -0,0 +1,8 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PackageVersionInfo } from "./PackageVersionInfo"
import type { Version } from "./Version"
export type GetPackageResponseFull = {
best: { [key: Version]: PackageVersionInfo }
otherVersions: { [key: Version]: PackageVersionInfo }
}

View File

@@ -1,3 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type Pem = string
export type Guid = string

View File

@@ -2,6 +2,6 @@
export type HardwareRequirements = {
device: { [key: string]: string }
ram: bigint | null
ram: number | null
arch: Array<string> | null
}

View File

@@ -1,3 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Version } from "./Version"
export type ListVersionSignersParams = { version: string }
export type ListVersionSignersParams = { version: Version }

View File

@@ -5,15 +5,15 @@ import type { Description } from "./Description"
import type { HardwareRequirements } from "./HardwareRequirements"
import type { ImageId } from "./ImageId"
import type { PackageId } from "./PackageId"
import type { Version } from "./Version"
import type { VolumeId } from "./VolumeId"
export type Manifest = {
id: PackageId
title: string
version: string
version: Version
releaseNotes: string
license: string
replaces: Array<string>
wrapperRepo: string
upstreamRepo: string
supportSite: string
@@ -27,6 +27,6 @@ export type Manifest = {
dependencies: Dependencies
hardwareRequirements: HardwareRequirements
gitHash: string | null
osVersion: string
osVersion: Version
hasConfig: boolean
}

View File

@@ -1,10 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Base64 } from "./Base64"
import type { Pem } from "./Pem"
export type Blake3Ed25519Signature = {
hash: Base64
size: bigint
pubkey: Pem
signature: Base64
export type MerkleArchiveCommitment = {
rootSighash: Base64
rootMaxsize: number
}

View File

@@ -1,4 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { OsVersionInfo } from "./OsVersionInfo"
import type { Version } from "./Version"
export type OsIndex = { versions: { [key: string]: OsVersionInfo } }
export type OsIndex = { versions: { [key: Version]: OsVersionInfo } }

View File

@@ -1,12 +1,14 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Blake3Commitment } from "./Blake3Commitment"
import type { Guid } from "./Guid"
import type { RegistryAsset } from "./RegistryAsset"
export type OsVersionInfo = {
headline: string
releaseNotes: string
sourceVersion: string
signers: string[]
iso: { [key: string]: RegistryAsset }
squashfs: { [key: string]: RegistryAsset }
img: { [key: string]: RegistryAsset }
signers: Array<Guid>
iso: { [key: string]: RegistryAsset<Blake3Commitment> }
squashfs: { [key: string]: RegistryAsset<Blake3Commitment> }
img: { [key: string]: RegistryAsset<Blake3Commitment> }
}

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type PackageDetailLevel = "short" | "full"

View File

@@ -0,0 +1,9 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Category } from "./Category"
import type { PackageId } from "./PackageId"
import type { PackageInfo } from "./PackageInfo"
export type PackageIndex = {
categories: { [key: string]: Category }
packages: { [key: PackageId]: PackageInfo }
}

View File

@@ -0,0 +1,9 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Guid } from "./Guid"
import type { PackageVersionInfo } from "./PackageVersionInfo"
import type { Version } from "./Version"
export type PackageInfo = {
signers: Array<Guid>
versions: { [key: Version]: PackageVersionInfo }
}

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type PackageInfoShort = { releaseNotes: string }

View File

@@ -0,0 +1,25 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DataUrl } from "./DataUrl"
import type { Description } from "./Description"
import type { HardwareRequirements } from "./HardwareRequirements"
import type { MerkleArchiveCommitment } from "./MerkleArchiveCommitment"
import type { RegistryAsset } from "./RegistryAsset"
import type { Version } from "./Version"
export type PackageVersionInfo = {
title: string
icon: DataUrl
description: Description
releaseNotes: string
gitHash: string
license: string
wrapperRepo: string
upstreamRepo: string
supportSite: string
marketingSite: string
categories: string[]
osVersion: Version
hardwareRequirements: HardwareRequirements
sourceVersion: string | null
s9pk: RegistryAsset<MerkleArchiveCommitment>
}

View File

@@ -1,4 +1,9 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SignatureInfo } from "./SignatureInfo"
import type { AnySignature } from "./AnySignature"
import type { AnyVerifyingKey } from "./AnyVerifyingKey"
export type RegistryAsset = { url: string; signatureInfo: SignatureInfo }
export type RegistryAsset<Commitment> = {
url: string
commitment: Commitment
signatures: { [key: AnyVerifyingKey]: AnySignature }
}

View File

@@ -1,3 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Version } from "./Version"
export type RemoveVersionParams = { version: string }
export type RemoveVersionParams = { version: Version }

Some files were not shown because too many files have changed in this diff Show More