Feature/registry package index (#2623)

* include system images in compat s9pk

* wip

* wip

* update types

* wip

* fix signature serialization

* Add SignatureHeader conversions

* finish display impl for get

---------

Co-authored-by: Shadowy Super Coder <musashidisciple@proton.me>
This commit is contained in:
Aiden McClelland
2024-05-31 12:13:23 -06:00
committed by GitHub
parent 0ccbb52c1f
commit fd7c2fbe93
113 changed files with 3265 additions and 1436 deletions

View File

@@ -10,10 +10,11 @@ use ts_rs::TS;
use crate::context::CliContext;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::signer::{ContactInfo, SignerInfo, SignerKey};
use crate::registry::signer::sign::AnyVerifyingKey;
use crate::registry::signer::{ContactInfo, SignerInfo};
use crate::registry::RegistryDatabase;
use crate::rpc_continuations::RequestGuid;
use crate::util::serde::{display_serializable, HandlerExtSerde, Pem, WithIoFormat};
use crate::rpc_continuations::Guid;
use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat};
pub fn admin_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
@@ -48,8 +49,8 @@ fn signers_api<C: Context>() -> ParentHandler<C> {
.subcommand("add", from_fn_async(cli_add_signer).no_display())
}
impl Model<BTreeMap<RequestGuid, SignerInfo>> {
pub fn get_signer(&self, key: &SignerKey) -> Result<RequestGuid, Error> {
impl Model<BTreeMap<Guid, SignerInfo>> {
pub fn get_signer(&self, key: &AnyVerifyingKey) -> Result<Guid, Error> {
self.as_entries()?
.into_iter()
.map(|(guid, s)| Ok::<_, Error>((guid, s.as_keys().de()?)))
@@ -60,7 +61,7 @@ impl Model<BTreeMap<RequestGuid, SignerInfo>> {
.ok_or_else(|| Error::new(eyre!("unknown signer"), ErrorKind::Authorization))
}
pub fn get_signer_info(&self, key: &SignerKey) -> Result<(RequestGuid, SignerInfo), Error> {
pub fn get_signer_info(&self, key: &AnyVerifyingKey) -> Result<(Guid, SignerInfo), Error> {
self.as_entries()?
.into_iter()
.map(|(guid, s)| Ok::<_, Error>((guid, s.de()?)))
@@ -88,17 +89,15 @@ impl Model<BTreeMap<RequestGuid, SignerInfo>> {
ErrorKind::InvalidRequest,
));
}
self.insert(&RequestGuid::new(), signer)
self.insert(&Guid::new(), signer)
}
}
pub async fn list_signers(
ctx: RegistryContext,
) -> Result<BTreeMap<RequestGuid, SignerInfo>, Error> {
pub async fn list_signers(ctx: RegistryContext) -> Result<BTreeMap<Guid, SignerInfo>, Error> {
ctx.db.peek().await.into_index().into_signers().de()
}
pub fn display_signers<T>(params: WithIoFormat<T>, signers: BTreeMap<RequestGuid, SignerInfo>) {
pub fn display_signers<T>(params: WithIoFormat<T>, signers: BTreeMap<Guid, SignerInfo>) {
use prettytable::*;
if let Some(format) = params.format {
@@ -137,8 +136,8 @@ pub struct CliAddSignerParams {
pub name: String,
#[arg(long = "contact", short = 'c')]
pub contact: Vec<ContactInfo>,
#[arg(long = "ed25519-key")]
pub ed25519_keys: Vec<Pem<ed25519_dalek::VerifyingKey>>,
#[arg(long = "key")]
pub keys: Vec<AnyVerifyingKey>,
pub database: Option<PathBuf>,
}
@@ -151,7 +150,7 @@ pub async fn cli_add_signer(
CliAddSignerParams {
name,
contact,
ed25519_keys,
keys,
database,
},
..
@@ -160,7 +159,7 @@ pub async fn cli_add_signer(
let signer = SignerInfo {
name,
contact,
keys: ed25519_keys.into_iter().map(SignerKey::Ed25519).collect(),
keys: keys.into_iter().collect(),
};
if let Some(database) = database {
TypedPatchDb::<RegistryDatabase>::load(PatchDb::open(database).await?)
@@ -181,8 +180,7 @@ pub async fn cli_add_signer(
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct AddAdminParams {
#[ts(type = "string")]
pub signer: RequestGuid,
pub signer: Guid,
}
pub async fn add_admin(
@@ -206,7 +204,7 @@ pub async fn add_admin(
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
pub struct CliAddAdminParams {
pub signer: RequestGuid,
pub signer: Guid,
pub database: Option<PathBuf>,
}
@@ -242,7 +240,7 @@ pub async fn cli_add_admin(
Ok(())
}
pub async fn list_admins(ctx: RegistryContext) -> Result<BTreeMap<RequestGuid, SignerInfo>, Error> {
pub async fn list_admins(ctx: RegistryContext) -> Result<BTreeMap<Guid, SignerInfo>, Error> {
let db = ctx.db.peek().await;
let admins = db.as_admins().de()?;
Ok(db

View File

@@ -1,3 +1,5 @@
use std::collections::HashMap;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
@@ -5,32 +7,48 @@ use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::signer::{AcceptSigners, FileValidator, SignatureInfo};
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey};
use crate::registry::signer::AcceptSigners;
use crate::s9pk::merkle_archive::source::http::HttpSource;
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct RegistryAsset {
pub struct RegistryAsset<Commitment> {
#[ts(type = "string")]
pub url: Url,
pub signature_info: SignatureInfo,
pub commitment: Commitment,
pub signatures: HashMap<AnyVerifyingKey, AnySignature>,
}
impl AsRef<RegistryAsset> for RegistryAsset {
fn as_ref(&self) -> &RegistryAsset {
self
impl<Commitment> RegistryAsset<Commitment> {
pub fn all_signers(&self) -> AcceptSigners {
AcceptSigners::All(
self.signatures
.keys()
.cloned()
.map(AcceptSigners::Signer)
.collect(),
)
}
}
impl RegistryAsset {
pub fn validate(&self, accept: AcceptSigners) -> Result<FileValidator, Error> {
self.signature_info.validate(accept)
impl<Commitment: Digestable> RegistryAsset<Commitment> {
pub fn validate(&self, context: &str, mut accept: AcceptSigners) -> Result<&Commitment, Error> {
for (signer, signature) in &self.signatures {
accept.process_signature(signer, &self.commitment, context, signature)?;
}
accept.try_accept()?;
Ok(&self.commitment)
}
}
impl<C: for<'a> Commitment<&'a HttpSource>> RegistryAsset<C> {
pub async fn download(
&self,
client: Client,
dst: &mut (impl AsyncWrite + Unpin + Send + ?Sized),
validator: &FileValidator,
) -> Result<(), Error> {
validator.download(self.url.clone(), client, dst).await
self.commitment
.copy_to(&HttpSource::new(client, self.url.clone()).await?, dst)
.await
}
}

View File

@@ -6,19 +6,23 @@ use axum::body::Body;
use axum::extract::Request;
use axum::response::Response;
use chrono::Utc;
use http_body_util::BodyExt;
use http::HeaderValue;
use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{Middleware, RpcRequest, RpcResponse};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha512};
use tokio::io::AsyncWriteExt;
use tokio::sync::Mutex;
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::signer::SignerKey;
use crate::util::serde::{Base64, Pem};
use crate::registry::signer::commitment::request::RequestCommitment;
use crate::registry::signer::commitment::Commitment;
use crate::registry::signer::sign::{
AnySignature, AnySigningKey, AnyVerifyingKey, SignatureScheme,
};
use crate::util::serde::Base64;
pub const AUTH_SIG_HEADER: &str = "X-StartOS-Registry-Auth-Sig";
@@ -34,7 +38,7 @@ pub struct Metadata {
#[derive(Clone)]
pub struct Auth {
nonce_cache: Arc<Mutex<BTreeMap<Instant, u64>>>, // for replay protection
signer: Option<Result<SignerKey, RpcError>>,
signer: Option<Result<AnyVerifyingKey, RpcError>>,
}
impl Auth {
pub fn new() -> Self {
@@ -68,41 +72,57 @@ pub struct RegistryAdminLogRecord {
pub name: String,
#[ts(type = "{ id: string | number | null; method: string; params: any }")]
pub request: RpcRequest,
pub key: SignerKey,
pub key: AnyVerifyingKey,
}
#[derive(Serialize, Deserialize)]
pub struct SignatureHeader {
pub timestamp: i64,
pub nonce: u64,
#[serde(flatten)]
pub signer: SignerKey,
pub signature: Base64<[u8; 64]>,
pub commitment: RequestCommitment,
pub signer: AnyVerifyingKey,
pub signature: AnySignature,
}
impl SignatureHeader {
pub fn sign_ed25519(
key: &ed25519_dalek::SigningKey,
body: &[u8],
context: &str,
) -> Result<Self, Error> {
pub fn to_header(&self) -> HeaderValue {
let mut url: Url = "http://localhost".parse().unwrap();
self.commitment.append_query(&mut url);
url.query_pairs_mut()
.append_pair("signer", &self.signer.to_string());
url.query_pairs_mut()
.append_pair("signature", &self.signature.to_string());
HeaderValue::from_str(url.query().unwrap_or_default()).unwrap()
}
pub fn from_header(header: &HeaderValue) -> Result<Self, Error> {
let url: Url = format!(
"http://localhost/?{}",
header.to_str().with_kind(ErrorKind::Utf8)?
)
.parse()?;
let query: BTreeMap<_, _> = url.query_pairs().collect();
Ok(Self {
commitment: RequestCommitment::from_query(&url)?,
signer: query.get("signer").or_not_found("signer")?.parse()?,
signature: query.get("signature").or_not_found("signature")?.parse()?,
})
}
pub fn sign(signer: &AnySigningKey, body: &[u8], context: &str) -> Result<Self, Error> {
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.as_secs() as i64)
.unwrap_or_else(|e| e.duration().as_secs() as i64 * -1);
let nonce = rand::random();
let signer = SignerKey::Ed25519(Pem(key.verifying_key()));
let mut hasher = Sha512::new();
hasher.update(&i64::to_be_bytes(timestamp));
hasher.update(&u64::to_be_bytes(nonce));
hasher.update(body);
let signature = Base64(
key.sign_prehashed(hasher, Some(context.as_bytes()))?
.to_bytes(),
);
Ok(Self {
let commitment = RequestCommitment {
timestamp,
nonce,
signer,
size: body.len() as u64,
blake3: Base64(*blake3::hash(body).as_bytes()),
};
let signature = signer
.scheme()
.sign_commitment(&signer, &commitment, context)?;
Ok(Self {
commitment,
signer: signer.verifying_key(),
signature,
})
}
@@ -120,43 +140,40 @@ impl Middleware<RegistryContext> for Auth {
async {
let request = request;
let SignatureHeader {
timestamp,
nonce,
commitment,
signer,
signature,
} = serde_urlencoded::from_str(
} = SignatureHeader::from_header(
request
.headers()
.get(AUTH_SIG_HEADER)
.or_not_found("missing X-StartOS-Registry-Auth-Sig")
.with_kind(ErrorKind::InvalidRequest)?
.to_str()
.with_kind(ErrorKind::Utf8)?,
)
.with_kind(ErrorKind::Deserialization)?;
.with_kind(ErrorKind::InvalidRequest)?,
)?;
signer.scheme().verify_commitment(
&signer,
&commitment,
&ctx.hostname,
&signature,
)?;
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.as_secs() as i64)
.unwrap_or_else(|e| e.duration().as_secs() as i64 * -1);
if (now - timestamp).abs() > 30 {
if (now - commitment.timestamp).abs() > 30 {
return Err(Error::new(
eyre!("timestamp not within 30s of now"),
ErrorKind::InvalidSignature,
));
}
self.handle_nonce(nonce).await?;
let body = std::mem::replace(request.body_mut(), Body::empty())
.collect()
.await
.with_kind(ErrorKind::Network)?
.to_bytes();
let mut verifier = signer.verifier();
verifier.update(&i64::to_be_bytes(timestamp));
verifier.update(&u64::to_be_bytes(nonce));
verifier.update(&body);
self.handle_nonce(commitment.nonce).await?;
let mut body = Vec::with_capacity(commitment.size as usize);
commitment.copy_to(request, &mut body).await?;
*request.body_mut() = Body::from(body);
verifier.verify(&*signature, &ctx.hostname)?;
Ok(signer)
}
.await

View File

@@ -6,6 +6,7 @@ use std::sync::Arc;
use clap::Parser;
use imbl_value::InternedString;
use patch_db::PatchDb;
use reqwest::{Client, Proxy};
use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{CallRemote, Context, Empty};
use serde::{Deserialize, Serialize};
@@ -17,9 +18,10 @@ use crate::context::config::{ContextConfig, CONFIG_PATH};
use crate::context::{CliContext, RpcContext};
use crate::prelude::*;
use crate::registry::auth::{SignatureHeader, AUTH_SIG_HEADER};
use crate::registry::device_info::{DeviceInfo, DEVICE_INFO_HEADER};
use crate::registry::signer::sign::AnySigningKey;
use crate::registry::RegistryDatabase;
use crate::rpc_continuations::RpcContinuations;
use crate::version::VersionT;
#[derive(Debug, Clone, Default, Deserialize, Serialize, Parser)]
#[serde(rename_all = "kebab-case")]
@@ -31,6 +33,8 @@ pub struct RegistryConfig {
pub listen: Option<SocketAddr>,
#[arg(short = 'h', long = "hostname")]
pub hostname: InternedString,
#[arg(short = 'p', long = "proxy")]
pub tor_proxy: Option<Url>,
#[arg(short = 'd', long = "datadir")]
pub datadir: Option<PathBuf>,
}
@@ -58,6 +62,7 @@ pub struct RegistryContextSeed {
pub db: TypedPatchDb<RegistryDatabase>,
pub datadir: PathBuf,
pub rpc_continuations: RpcContinuations,
pub client: Client,
pub shutdown: Sender<()>,
}
@@ -81,6 +86,11 @@ impl RegistryContext {
|| async { Ok(Default::default()) },
)
.await?;
let tor_proxy_url = config
.tor_proxy
.clone()
.map(Ok)
.unwrap_or_else(|| "socks5h://localhost:9050".parse())?;
Ok(Self(Arc::new(RegistryContextSeed {
hostname: config.hostname.clone(),
listen: config
@@ -89,6 +99,16 @@ impl RegistryContext {
db,
datadir,
rpc_continuations: RpcContinuations::new(),
client: Client::builder()
.proxy(Proxy::custom(move |url| {
if url.host_str().map_or(false, |h| h.ends_with(".onion")) {
Some(tor_proxy_url.clone())
} else {
None
}
}))
.build()
.with_kind(crate::ErrorKind::ParseUrl)?,
shutdown,
})))
}
@@ -145,12 +165,11 @@ impl CallRemote<RegistryContext> for CliContext {
.header(CONTENT_LENGTH, body.len())
.header(
AUTH_SIG_HEADER,
serde_urlencoded::to_string(&SignatureHeader::sign_ed25519(
self.developer_key()?,
SignatureHeader::sign(
&AnySigningKey::Ed25519(self.developer_key()?.clone()),
&body,
&host,
)?)
.with_kind(ErrorKind::Serialization)?,
)?.to_header(),
)
.body(body)
.send()
@@ -171,29 +190,6 @@ impl CallRemote<RegistryContext> for CliContext {
}
}
fn hardware_header(ctx: &RpcContext) -> String {
let mut url: Url = "http://localhost".parse().unwrap();
url.query_pairs_mut()
.append_pair(
"os.version",
&crate::version::Current::new().semver().to_string(),
)
.append_pair(
"os.compat",
&crate::version::Current::new().compat().to_string(),
)
.append_pair("os.arch", &*crate::PLATFORM)
.append_pair("hardware.arch", &*crate::ARCH)
.append_pair("hardware.ram", &ctx.hardware.ram.to_string());
for hw in &ctx.hardware.devices {
url.query_pairs_mut()
.append_pair(&format!("hardware.device.{}", hw.class()), hw.product());
}
url.query().unwrap_or_default().to_string()
}
impl CallRemote<RegistryContext, RegistryUrlParams> for RpcContext {
async fn call_remote(
&self,
@@ -221,7 +217,7 @@ impl CallRemote<RegistryContext, RegistryUrlParams> for RpcContext {
.header(CONTENT_TYPE, "application/json")
.header(ACCEPT, "application/json")
.header(CONTENT_LENGTH, body.len())
.header("X-StartOS-Hardware", &hardware_header(self))
.header(DEVICE_INFO_HEADER, DeviceInfo::from(self).to_header_value())
.body(body)
.send()
.await?;

View File

@@ -0,0 +1,199 @@
use std::collections::BTreeMap;
use std::convert::identity;
use std::ops::Deref;
use axum::extract::Request;
use axum::response::Response;
use emver::{Version, VersionRange};
use http::HeaderValue;
use imbl_value::InternedString;
use rpc_toolkit::{Middleware, RpcRequest, RpcResponse};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
use crate::context::RpcContext;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::util::VersionString;
use crate::version::VersionT;
pub const DEVICE_INFO_HEADER: &str = "X-StartOS-Device-Info";
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct DeviceInfo {
pub os: OsInfo,
pub hardware: HardwareInfo,
}
impl From<&RpcContext> for DeviceInfo {
fn from(value: &RpcContext) -> Self {
Self {
os: OsInfo::from(value),
hardware: HardwareInfo::from(value),
}
}
}
impl DeviceInfo {
pub fn to_header_value(&self) -> HeaderValue {
let mut url: Url = "http://localhost".parse().unwrap();
url.query_pairs_mut()
.append_pair("os.version", &self.os.version.to_string())
.append_pair("os.compat", &self.os.compat.to_string())
.append_pair("os.platform", &*self.os.platform)
.append_pair("hardware.arch", &*self.hardware.arch)
.append_pair("hardware.ram", &self.hardware.ram.to_string());
for (class, products) in &self.hardware.devices {
for product in products {
url.query_pairs_mut()
.append_pair(&format!("hardware.device.{}", class), product);
}
}
HeaderValue::from_str(url.query().unwrap_or_default()).unwrap()
}
pub fn from_header_value(header: &HeaderValue) -> Result<Self, Error> {
let url: Url = format!(
"http://localhost/?{}",
header.to_str().with_kind(ErrorKind::ParseUrl)?
)
.parse()?;
let query: BTreeMap<_, _> = url.query_pairs().collect();
Ok(Self {
os: OsInfo {
version: query
.get("os.version")
.or_not_found("os.version")?
.parse()?,
compat: query.get("os.compat").or_not_found("os.compat")?.parse()?,
platform: query
.get("os.platform")
.or_not_found("os.platform")?
.deref()
.into(),
},
hardware: HardwareInfo {
arch: query
.get("hardware.arch")
.or_not_found("hardware.arch")?
.parse()?,
ram: query
.get("hardware.ram")
.or_not_found("hardware.ram")?
.parse()?,
devices: identity(query)
.split_off("hardware.device.")
.into_iter()
.filter_map(|(k, v)| {
k.strip_prefix("hardware.device.")
.map(|k| (k.into(), v.into_owned()))
})
.fold(BTreeMap::new(), |mut acc, (k, v)| {
let mut devs = acc.remove(&k).unwrap_or_default();
devs.push(v);
acc.insert(k, devs);
acc
}),
},
})
}
}
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct OsInfo {
#[ts(as = "VersionString")]
pub version: Version,
#[ts(type = "string")]
pub compat: VersionRange,
#[ts(type = "string")]
pub platform: InternedString,
}
impl From<&RpcContext> for OsInfo {
fn from(_: &RpcContext) -> Self {
Self {
version: crate::version::Current::new().semver(),
compat: crate::version::Current::new().compat().clone(),
platform: InternedString::intern(&*crate::PLATFORM),
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct HardwareInfo {
#[ts(type = "string")]
pub arch: InternedString,
#[ts(type = "number")]
pub ram: u64,
#[ts(as = "BTreeMap::<String, Vec<String>>")]
pub devices: BTreeMap<InternedString, Vec<String>>,
}
impl From<&RpcContext> for HardwareInfo {
fn from(value: &RpcContext) -> Self {
Self {
arch: InternedString::intern(&**crate::ARCH),
ram: value.hardware.ram,
devices: value
.hardware
.devices
.iter()
.fold(BTreeMap::new(), |mut acc, dev| {
let mut devs = acc.remove(dev.class()).unwrap_or_default();
devs.push(dev.product().to_owned());
acc.insert(dev.class().into(), devs);
acc
}),
}
}
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
#[serde(default)]
get_device_info: bool,
}
#[derive(Clone)]
pub struct DeviceInfoMiddleware {
device_info: Option<HeaderValue>,
}
impl DeviceInfoMiddleware {
pub fn new() -> Self {
Self { device_info: None }
}
}
impl Middleware<RegistryContext> for DeviceInfoMiddleware {
type Metadata = Metadata;
async fn process_http_request(
&mut self,
_: &RegistryContext,
request: &mut Request,
) -> Result<(), Response> {
self.device_info = request.headers_mut().remove(DEVICE_INFO_HEADER);
Ok(())
}
async fn process_rpc_request(
&mut self,
_: &RegistryContext,
metadata: Self::Metadata,
request: &mut RpcRequest,
) -> Result<(), RpcResponse> {
async move {
if metadata.get_device_info {
if let Some(device_info) = &self.device_info {
request.params["__device_info"] =
to_value(&DeviceInfo::from_header_value(device_info)?)?;
}
}
Ok::<_, Error>(())
}
.await
.map_err(|e| RpcResponse::from_result(Err(e)))
}
}

View File

@@ -3,20 +3,23 @@ use std::net::SocketAddr;
use axum::Router;
use futures::future::ready;
use models::DataUrl;
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler, Server};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::context::{CliContext};
use crate::context::CliContext;
use crate::middleware::cors::Cors;
use crate::net::static_server::{bad_request, not_found, server_error};
use crate::net::web_server::WebServer;
use crate::prelude::*;
use crate::registry::auth::Auth;
use crate::registry::context::{RegistryContext};
use crate::registry::context::RegistryContext;
use crate::registry::device_info::DeviceInfoMiddleware;
use crate::registry::os::index::OsIndex;
use crate::registry::package::index::PackageIndex;
use crate::registry::signer::SignerInfo;
use crate::rpc_continuations::RequestGuid;
use crate::rpc_continuations::Guid;
use crate::util::serde::HandlerExtSerde;
pub mod admin;
@@ -24,26 +27,29 @@ pub mod asset;
pub mod auth;
pub mod context;
pub mod db;
pub mod device_info;
pub mod os;
pub mod package;
pub mod signer;
#[derive(Debug, Default, Deserialize, Serialize, HasModel)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
pub struct RegistryDatabase {
pub admins: BTreeSet<RequestGuid>,
pub admins: BTreeSet<Guid>,
pub index: FullIndex,
}
impl RegistryDatabase {}
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct FullIndex {
// pub package: PackageIndex,
pub icon: Option<DataUrl<'static>>,
pub package: PackageIndex,
pub os: OsIndex,
#[ts(as = "BTreeMap::<String, SignerInfo>")]
pub signers: BTreeMap<RequestGuid, SignerInfo>,
pub signers: BTreeMap<Guid, SignerInfo>,
}
pub async fn get_full_index(ctx: RegistryContext) -> Result<FullIndex, Error> {
@@ -59,6 +65,7 @@ pub fn registry_api<C: Context>() -> ParentHandler<C> {
.with_call_remote::<CliContext>(),
)
.subcommand("os", os::os_api::<C>())
.subcommand("package", package::package_api::<C>())
.subcommand("admin", admin::admin_api::<C>())
.subcommand("db", db::db_api::<C>())
}
@@ -72,7 +79,8 @@ pub fn registry_server_router(ctx: RegistryContext) -> Router {
post(
Server::new(move || ready(Ok(ctx.clone())), registry_api())
.middleware(Cors::new())
.middleware(Auth::new()),
.middleware(Auth::new())
.middleware(DeviceInfoMiddleware::new()),
)
})
.route(
@@ -81,7 +89,7 @@ pub fn registry_server_router(ctx: RegistryContext) -> Router {
let ctx = ctx.clone();
move |x::Path(path): x::Path<String>,
ws: axum::extract::ws::WebSocketUpgrade| async move {
match RequestGuid::from(&path) {
match Guid::from(&path) {
None => {
tracing::debug!("No Guid Path");
bad_request()
@@ -104,7 +112,7 @@ pub fn registry_server_router(ctx: RegistryContext) -> Router {
.path()
.strip_prefix("/rest/rpc/")
.unwrap_or_default();
match RequestGuid::from(&path) {
match Guid::from(&path) {
None => {
tracing::debug!("No Guid Path");
bad_request()

View File

@@ -1,17 +1,13 @@
use std::collections::BTreeMap;
use std::collections::{BTreeMap, HashMap};
use std::panic::UnwindSafe;
use std::path::PathBuf;
use std::time::Duration;
use axum::response::Response;
use clap::Parser;
use futures::{FutureExt, TryStreamExt};
use helpers::NonDetachingJoinHandle;
use imbl_value::InternedString;
use itertools::Itertools;
use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha512};
use ts_rs::TS;
use url::Url;
@@ -22,10 +18,15 @@ use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
use crate::registry::os::SIG_CONTEXT;
use crate::registry::signer::{Blake3Ed25519Signature, Signature, SignatureInfo, SignerKey};
use crate::rpc_continuations::{RequestGuid, RpcContinuation};
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::{Apply, Version};
use crate::util::serde::Base64;
use crate::util::VersionString;
pub fn add_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
@@ -53,39 +54,37 @@ pub fn add_api<C: Context>() -> ParentHandler<C> {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct AddAssetParams {
#[ts(type = "string")]
pub url: Url,
pub signature: Signature,
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
#[ts(type = "string")]
pub platform: InternedString,
#[serde(default)]
pub upload: bool,
#[ts(type = "string")]
pub url: Url,
#[serde(rename = "__auth_signer")]
pub signer: SignerKey,
#[ts(skip)]
pub signer: AnyVerifyingKey,
pub signature: AnySignature,
pub commitment: Blake3Commitment,
}
async fn add_asset(
ctx: RegistryContext,
AddAssetParams {
url,
signature,
version,
platform,
upload,
url,
signer,
signature,
commitment,
}: AddAssetParams,
accessor: impl FnOnce(&mut Model<OsVersionInfo>) -> &mut Model<BTreeMap<InternedString, RegistryAsset>>
accessor: impl FnOnce(
&mut Model<OsVersionInfo>,
) -> &mut Model<BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>>
+ UnwindSafe
+ Send,
) -> Result<Option<RequestGuid>, Error> {
ensure_code!(
signature.signer() == signer,
ErrorKind::InvalidSignature,
"asset signature does not match request signer"
);
) -> Result<(), Error> {
signer
.scheme()
.verify_commitment(&signer, &commitment, SIG_CONTEXT, &signature)?;
ctx.db
.mutate(|db| {
let signer_guid = db.as_index().as_signers().get_signer(&signer)?;
@@ -95,7 +94,7 @@ async fn add_asset(
.as_versions()
.as_idx(&version)
.or_not_found(&version)?
.as_signers()
.as_authorized()
.de()?
.contains(&signer_guid)
{
@@ -109,11 +108,21 @@ async fn add_asset(
.upsert(&platform, || {
Ok(RegistryAsset {
url,
signature_info: SignatureInfo::new(SIG_CONTEXT),
commitment: commitment.clone(),
signatures: HashMap::new(),
})
})?
.as_signature_info_mut()
.mutate(|s| s.add_sig(&signature))?;
.mutate(|s| {
if s.commitment != commitment {
Err(Error::new(
eyre!("commitment does not match"),
ErrorKind::InvalidSignature,
))
} else {
s.signatures.insert(signer, signature);
Ok(())
}
})?;
Ok(())
} else {
Err(Error::new(eyre!("UNAUTHORIZED"), ErrorKind::Authorization))
@@ -121,80 +130,18 @@ async fn add_asset(
})
.await?;
let guid = if upload {
let guid = RequestGuid::new();
let auth_guid = guid.clone();
let signer = signature.signer();
let hostname = ctx.hostname.clone();
ctx.rpc_continuations
.add(
guid.clone(),
RpcContinuation::rest(
Box::new(|req| {
async move {
Ok(
if async move {
let auth_sig = base64::decode(
req.headers().get("X-StartOS-Registry-Auth-Sig")?,
)
.ok()?;
signer
.verify_message(
auth_guid.as_ref().as_bytes(),
&auth_sig,
&hostname,
)
.ok()?;
Some(())
}
.await
.is_some()
{
Response::builder()
.status(200)
.body(axum::body::Body::empty())
.with_kind(ErrorKind::Network)?
} else {
Response::builder()
.status(401)
.body(axum::body::Body::empty())
.with_kind(ErrorKind::Network)?
},
)
}
.boxed()
}),
Duration::from_secs(30),
),
)
.await;
Some(guid)
} else {
None
};
Ok(guid)
Ok(())
}
pub async fn add_iso(
ctx: RegistryContext,
params: AddAssetParams,
) -> Result<Option<RequestGuid>, Error> {
pub async fn add_iso(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> {
add_asset(ctx, params, |m| m.as_iso_mut()).await
}
pub async fn add_img(
ctx: RegistryContext,
params: AddAssetParams,
) -> Result<Option<RequestGuid>, Error> {
pub async fn add_img(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> {
add_asset(ctx, params, |m| m.as_img_mut()).await
}
pub async fn add_squashfs(
ctx: RegistryContext,
params: AddAssetParams,
) -> Result<Option<RequestGuid>, Error> {
pub async fn add_squashfs(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> {
add_asset(ctx, params, |m| m.as_squashfs_mut()).await
}
@@ -205,11 +152,9 @@ pub struct CliAddAssetParams {
#[arg(short = 'p', long = "platform")]
pub platform: InternedString,
#[arg(short = 'v', long = "version")]
pub version: Version,
pub version: VersionString,
pub file: PathBuf,
pub url: Url,
#[arg(short = 'u', long = "upload")]
pub upload: bool,
}
pub async fn cli_add_asset(
@@ -223,7 +168,6 @@ pub async fn cli_add_asset(
version,
file: path,
url,
upload,
},
..
}: HandlerArgs<CliContext, CliAddAssetParams>,
@@ -240,21 +184,18 @@ pub async fn cli_add_asset(
}
};
let file = tokio::fs::File::open(&path).await?.into();
let file = MultiCursorFile::from(tokio::fs::File::open(&path).await?);
let mut progress = FullProgressTracker::new();
let progress_handle = progress.handle();
let mut sign_phase =
progress_handle.add_phase(InternedString::intern("Signing File"), Some(10));
let mut verify_phase =
progress_handle.add_phase(InternedString::intern("Verifying URL"), Some(100));
let mut index_phase = progress_handle.add_phase(
InternedString::intern("Adding File to Registry Index"),
Some(1),
);
let mut upload_phase = if upload {
Some(progress_handle.add_phase(InternedString::intern("Uploading File"), Some(100)))
} else {
None
};
let progress_task: NonDetachingJoinHandle<()> = tokio::spawn(async move {
let mut bar = PhasedProgressBar::new(&format!("Adding {} to registry...", path.display()));
@@ -270,70 +211,46 @@ pub async fn cli_add_asset(
.into();
sign_phase.start();
let blake3_sig =
Blake3Ed25519Signature::sign_file(ctx.developer_key()?, &file, SIG_CONTEXT).await?;
let size = blake3_sig.size;
let signature = Signature::Blake3Ed25519(blake3_sig);
let blake3 = file.blake3_mmap().await?;
let size = file
.size()
.await
.ok_or_else(|| Error::new(eyre!("failed to read file metadata"), ErrorKind::Filesystem))?;
let commitment = Blake3Commitment {
hash: Base64(*blake3.as_bytes()),
size,
};
let signature = Ed25519.sign_commitment(ctx.developer_key()?, &commitment, SIG_CONTEXT)?;
sign_phase.complete();
index_phase.start();
let add_res = from_value::<Option<RequestGuid>>(
ctx.call_remote::<RegistryContext>(
&parent_method
.into_iter()
.chain(method)
.chain([ext])
.join("."),
imbl_value::json!({
"platform": platform,
"version": version,
"url": &url,
"signature": signature,
"upload": upload,
}),
)
.await?,
)?;
index_phase.complete();
verify_phase.start();
let src = HttpSource::new(ctx.client.clone(), url.clone()).await?;
let mut writer = verify_phase.writer(VerifyingWriter::new(
tokio::io::sink(),
Some((blake3::Hash::from_bytes(*commitment.hash), commitment.size)),
));
src.copy_all_to(&mut writer).await?;
let (verifier, mut verify_phase) = writer.into_inner();
verifier.verify().await?;
verify_phase.complete();
if let Some(guid) = add_res {
upload_phase.as_mut().map(|p| p.start());
upload_phase.as_mut().map(|p| p.set_total(size));
let reg_url = ctx.registry_url.as_ref().or_not_found("--registry")?;
ctx.client
.post(url)
.header("X-StartOS-Registry-Token", guid.as_ref())
.header(
"X-StartOS-Registry-Auth-Sig",
base64::encode(
ctx.developer_key()?
.sign_prehashed(
Sha512::new_with_prefix(guid.as_ref().as_bytes()),
Some(
reg_url
.host()
.or_not_found("registry hostname")?
.to_string()
.as_bytes(),
),
)?
.to_bytes(),
),
)
.body(reqwest::Body::wrap_stream(
tokio_util::io::ReaderStream::new(file.fetch(0, size).await?).inspect_ok(
move |b| {
upload_phase
.as_mut()
.map(|p| *p += b.len() as u64)
.apply(|_| ())
},
),
))
.send()
.await?;
// upload_phase.as_mut().map(|p| p.complete());
}
index_phase.start();
ctx.call_remote::<RegistryContext>(
&parent_method
.into_iter()
.chain(method)
.chain([ext])
.join("."),
imbl_value::json!({
"platform": platform,
"version": version,
"url": &url,
"signature": signature,
"commitment": commitment,
}),
)
.await?;
index_phase.complete();
progress_handle.complete();

View File

@@ -16,8 +16,11 @@ use crate::progress::{FullProgressTracker, PhasedProgressBar};
use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
use crate::registry::os::SIG_CONTEXT;
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::registry::signer::commitment::Commitment;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::util::Version;
use crate::util::VersionString;
pub fn get_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
@@ -33,8 +36,7 @@ pub fn get_api<C: Context>() -> ParentHandler<C> {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GetOsAssetParams {
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
#[ts(type = "string")]
pub platform: InternedString,
}
@@ -42,10 +44,12 @@ pub struct GetOsAssetParams {
async fn get_os_asset(
ctx: RegistryContext,
GetOsAssetParams { version, platform }: GetOsAssetParams,
accessor: impl FnOnce(&Model<OsVersionInfo>) -> &Model<BTreeMap<InternedString, RegistryAsset>>
accessor: impl FnOnce(
&Model<OsVersionInfo>,
) -> &Model<BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>>
+ UnwindSafe
+ Send,
) -> Result<RegistryAsset, Error> {
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
accessor(
ctx.db
.peek()
@@ -64,21 +68,21 @@ async fn get_os_asset(
pub async fn get_iso(
ctx: RegistryContext,
params: GetOsAssetParams,
) -> Result<RegistryAsset, Error> {
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
get_os_asset(ctx, params, |info| info.as_iso()).await
}
pub async fn get_img(
ctx: RegistryContext,
params: GetOsAssetParams,
) -> Result<RegistryAsset, Error> {
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
get_os_asset(ctx, params, |info| info.as_img()).await
}
pub async fn get_squashfs(
ctx: RegistryContext,
params: GetOsAssetParams,
) -> Result<RegistryAsset, Error> {
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
get_os_asset(ctx, params, |info| info.as_squashfs()).await
}
@@ -86,7 +90,7 @@ pub async fn get_squashfs(
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
pub struct CliGetOsAssetParams {
pub version: Version,
pub version: VersionString,
pub platform: InternedString,
#[arg(long = "download", short = 'd')]
pub download: Option<PathBuf>,
@@ -112,8 +116,8 @@ async fn cli_get_os_asset(
},
..
}: HandlerArgs<CliContext, CliGetOsAssetParams>,
) -> Result<RegistryAsset, Error> {
let res = from_value::<RegistryAsset>(
) -> Result<RegistryAsset<Blake3Commitment>, Error> {
let res = from_value::<RegistryAsset<Blake3Commitment>>(
ctx.call_remote::<RegistryContext>(
&parent_method.into_iter().chain(method).join("."),
json!({
@@ -124,7 +128,7 @@ async fn cli_get_os_asset(
.await?,
)?;
let validator = res.validate(res.signature_info.all_signers())?;
res.validate(SIG_CONTEXT, res.all_signers())?;
if let Some(download) = download {
let mut file = AtomicFile::new(&download, None::<&Path>)
@@ -135,7 +139,7 @@ async fn cli_get_os_asset(
let progress_handle = progress.handle();
let mut download_phase =
progress_handle.add_phase(InternedString::intern("Downloading File"), Some(100));
download_phase.set_total(validator.size()?);
download_phase.set_total(res.commitment.size);
let reverify_phase = if reverify {
Some(progress_handle.add_phase(InternedString::intern("Reverifying File"), Some(10)))
} else {
@@ -157,7 +161,7 @@ async fn cli_get_os_asset(
download_phase.start();
let mut download_writer = download_phase.writer(&mut *file);
res.download(ctx.client.clone(), &mut download_writer, &validator)
res.download(ctx.client.clone(), &mut download_writer)
.await?;
let (_, mut download_phase) = download_writer.into_inner();
file.save().await.with_kind(ErrorKind::Filesystem)?;
@@ -165,8 +169,8 @@ async fn cli_get_os_asset(
if let Some(mut reverify_phase) = reverify_phase {
reverify_phase.start();
validator
.validate_file(&MultiCursorFile::from(
res.commitment
.check(&MultiCursorFile::from(
tokio::fs::File::open(download).await?,
))
.await?;

View File

@@ -17,25 +17,47 @@ use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
use crate::registry::os::SIG_CONTEXT;
use crate::registry::signer::{Blake3Ed25519Signature, Signature};
use crate::util::Version;
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::serde::Base64;
use crate::util::VersionString;
pub fn sign_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
.subcommand("iso", from_fn_async(sign_iso).no_cli())
.subcommand("img", from_fn_async(sign_img).no_cli())
.subcommand("squashfs", from_fn_async(sign_squashfs).no_cli())
.subcommand(
"iso",
from_fn_async(sign_iso)
.with_metadata("getSigner", Value::Bool(true))
.no_cli(),
)
.subcommand(
"img",
from_fn_async(sign_img)
.with_metadata("getSigner", Value::Bool(true))
.no_cli(),
)
.subcommand(
"squashfs",
from_fn_async(sign_squashfs)
.with_metadata("getSigner", Value::Bool(true))
.no_cli(),
)
}
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SignAssetParams {
#[ts(type = "string")]
version: Version,
version: VersionString,
#[ts(type = "string")]
platform: InternedString,
signature: Signature,
#[ts(skip)]
#[serde(rename = "__auth_signer")]
signer: AnyVerifyingKey,
signature: AnySignature,
}
async fn sign_asset(
@@ -43,22 +65,25 @@ async fn sign_asset(
SignAssetParams {
version,
platform,
signer,
signature,
}: SignAssetParams,
accessor: impl FnOnce(&mut Model<OsVersionInfo>) -> &mut Model<BTreeMap<InternedString, RegistryAsset>>
accessor: impl FnOnce(
&mut Model<OsVersionInfo>,
) -> &mut Model<BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>>
+ UnwindSafe
+ Send,
) -> Result<(), Error> {
ctx.db
.mutate(|db| {
let guid = db.as_index().as_signers().get_signer(&signature.signer())?;
let guid = db.as_index().as_signers().get_signer(&signer)?;
if !db
.as_index()
.as_os()
.as_versions()
.as_idx(&version)
.or_not_found(&version)?
.as_signers()
.as_authorized()
.de()?
.contains(&guid)
{
@@ -77,8 +102,16 @@ async fn sign_asset(
)
.as_idx_mut(&platform)
.or_not_found(&platform)?
.as_signature_info_mut()
.mutate(|s| s.add_sig(&signature))?;
.mutate(|s| {
signer.scheme().verify_commitment(
&signer,
&s.commitment,
SIG_CONTEXT,
&signature,
)?;
s.signatures.insert(signer, signature);
Ok(())
})?;
Ok(())
})
@@ -104,7 +137,7 @@ pub struct CliSignAssetParams {
#[arg(short = 'p', long = "platform")]
pub platform: InternedString,
#[arg(short = 'v', long = "version")]
pub version: Version,
pub version: VersionString,
pub file: PathBuf,
}
@@ -134,7 +167,7 @@ pub async fn cli_sign_asset(
}
};
let file = tokio::fs::File::open(&path).await?.into();
let file = MultiCursorFile::from(tokio::fs::File::open(&path).await?);
let mut progress = FullProgressTracker::new();
let progress_handle = progress.handle();
@@ -159,9 +192,16 @@ pub async fn cli_sign_asset(
.into();
sign_phase.start();
let blake3_sig =
Blake3Ed25519Signature::sign_file(ctx.developer_key()?, &file, SIG_CONTEXT).await?;
let signature = Signature::Blake3Ed25519(blake3_sig);
let blake3 = file.blake3_mmap().await?;
let size = file
.size()
.await
.ok_or_else(|| Error::new(eyre!("failed to read file metadata"), ErrorKind::Filesystem))?;
let commitment = Blake3Commitment {
hash: Base64(*blake3.as_bytes()),
size,
};
let signature = Ed25519.sign_commitment(ctx.developer_key()?, &commitment, SIG_CONTEXT)?;
sign_phase.complete();
index_phase.start();

View File

@@ -8,16 +8,16 @@ use ts_rs::TS;
use crate::prelude::*;
use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::rpc_continuations::RequestGuid;
use crate::util::Version;
use crate::registry::signer::commitment::blake3::Blake3Commitment;
use crate::rpc_continuations::Guid;
use crate::util::VersionString;
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct OsIndex {
#[ts(as = "BTreeMap::<String, OsVersionInfo>")]
pub versions: BTreeMap<Version, OsVersionInfo>,
pub versions: BTreeMap<VersionString, OsVersionInfo>,
}
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
@@ -29,14 +29,13 @@ pub struct OsVersionInfo {
pub release_notes: String,
#[ts(type = "string")]
pub source_version: VersionRange,
#[ts(type = "string[]")]
pub signers: BTreeSet<RequestGuid>,
#[ts(as = "BTreeMap::<String, RegistryAsset>")]
pub iso: BTreeMap<InternedString, RegistryAsset>, // platform (i.e. x86_64-nonfree) -> asset
#[ts(as = "BTreeMap::<String, RegistryAsset>")]
pub squashfs: BTreeMap<InternedString, RegistryAsset>, // platform (i.e. x86_64-nonfree) -> asset
#[ts(as = "BTreeMap::<String, RegistryAsset>")]
pub img: BTreeMap<InternedString, RegistryAsset>, // platform (i.e. raspberrypi) -> asset
pub authorized: BTreeSet<Guid>,
#[ts(as = "BTreeMap::<String, RegistryAsset::<Blake3Commitment>>")]
pub iso: BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>, // platform (i.e. x86_64-nonfree) -> asset
#[ts(as = "BTreeMap::<String, RegistryAsset::<Blake3Commitment>>")]
pub squashfs: BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>, // platform (i.e. x86_64-nonfree) -> asset
#[ts(as = "BTreeMap::<String, RegistryAsset::<Blake3Commitment>>")]
pub img: BTreeMap<InternedString, RegistryAsset<Blake3Commitment>>, // platform (i.e. raspberrypi) -> asset
}
pub async fn get_os_index(ctx: RegistryContext) -> Result<OsIndex, Error> {

View File

@@ -11,9 +11,9 @@ use crate::context::CliContext;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::os::index::OsVersionInfo;
use crate::registry::signer::SignerKey;
use crate::registry::signer::sign::AnyVerifyingKey;
use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat};
use crate::util::Version;
use crate::util::VersionString;
pub mod signer;
@@ -51,8 +51,7 @@ pub fn version_api<C: Context>() -> ParentHandler<C> {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct AddVersionParams {
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
pub headline: String,
pub release_notes: String,
#[ts(type = "string")]
@@ -60,7 +59,7 @@ pub struct AddVersionParams {
#[arg(skip)]
#[ts(skip)]
#[serde(rename = "__auth_signer")]
pub signer: Option<SignerKey>,
pub signer: Option<AnyVerifyingKey>,
}
pub async fn add_version(
@@ -86,7 +85,7 @@ pub async fn add_version(
i.headline = headline;
i.release_notes = release_notes;
i.source_version = source_version;
i.signers.extend(signer);
i.authorized.extend(signer);
Ok(())
})
})
@@ -98,8 +97,7 @@ pub async fn add_version(
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct RemoveVersionParams {
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
}
pub async fn remove_version(
@@ -124,7 +122,7 @@ pub async fn remove_version(
pub struct GetVersionParams {
#[ts(type = "string | null")]
#[arg(long = "src")]
pub source: Option<Version>,
pub source: Option<VersionString>,
#[ts(type = "string | null")]
#[arg(long = "target")]
pub target: Option<VersionRange>,
@@ -133,7 +131,7 @@ pub struct GetVersionParams {
pub async fn get_version(
ctx: RegistryContext,
GetVersionParams { source, target }: GetVersionParams,
) -> Result<BTreeMap<Version, OsVersionInfo>, Error> {
) -> Result<BTreeMap<VersionString, OsVersionInfo>, Error> {
let target = target.unwrap_or(VersionRange::Any);
ctx.db
.peek()
@@ -153,7 +151,10 @@ pub async fn get_version(
.collect()
}
pub fn display_version_info<T>(params: WithIoFormat<T>, info: BTreeMap<Version, OsVersionInfo>) {
pub fn display_version_info<T>(
params: WithIoFormat<T>,
info: BTreeMap<VersionString, OsVersionInfo>,
) {
use prettytable::*;
if let Some(format) = params.format {

View File

@@ -10,9 +10,9 @@ use crate::prelude::*;
use crate::registry::admin::display_signers;
use crate::registry::context::RegistryContext;
use crate::registry::signer::SignerInfo;
use crate::rpc_continuations::RequestGuid;
use crate::rpc_continuations::Guid;
use crate::util::serde::HandlerExtSerde;
use crate::util::Version;
use crate::util::VersionString;
pub fn signer_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
@@ -44,10 +44,8 @@ pub fn signer_api<C: Context>() -> ParentHandler<C> {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct VersionSignerParams {
#[ts(type = "string")]
pub version: Version,
#[ts(type = "string")]
pub signer: RequestGuid,
pub version: VersionString,
pub signer: Guid,
}
pub async fn add_version_signer(
@@ -67,7 +65,7 @@ pub async fn add_version_signer(
.as_versions_mut()
.as_idx_mut(&version)
.or_not_found(&version)?
.as_signers_mut()
.as_authorized_mut()
.mutate(|s| Ok(s.insert(signer)))?;
Ok(())
@@ -87,7 +85,7 @@ pub async fn remove_version_signer(
.as_versions_mut()
.as_idx_mut(&version)
.or_not_found(&version)?
.as_signers_mut()
.as_authorized_mut()
.mutate(|s| Ok(s.remove(&signer)))?
{
return Err(Error::new(
@@ -106,21 +104,20 @@ pub async fn remove_version_signer(
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ListVersionSignersParams {
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
}
pub async fn list_version_signers(
ctx: RegistryContext,
ListVersionSignersParams { version }: ListVersionSignersParams,
) -> Result<BTreeMap<RequestGuid, SignerInfo>, Error> {
) -> Result<BTreeMap<Guid, SignerInfo>, Error> {
let db = ctx.db.peek().await;
db.as_index()
.as_os()
.as_versions()
.as_idx(&version)
.or_not_found(&version)?
.as_signers()
.as_authorized()
.de()?
.into_iter()
.filter_map(|guid| {

View File

@@ -0,0 +1,170 @@
use std::path::PathBuf;
use std::sync::Arc;
use clap::Parser;
use helpers::NonDetachingJoinHandle;
use imbl_value::InternedString;
use itertools::Itertools;
use rpc_toolkit::HandlerArgs;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
use crate::context::CliContext;
use crate::prelude::*;
use crate::progress::{FullProgressTracker, PhasedProgressBar};
use crate::registry::context::RegistryContext;
use crate::registry::package::index::PackageVersionInfo;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::v2::SIG_CONTEXT;
use crate::s9pk::S9pk;
use crate::util::io::TrackingIO;
/// RPC parameters for adding an s9pk to the registry's package index.
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct AddPackageParams {
    /// Location the s9pk was uploaded to; the registry re-fetches and verifies it from here.
    #[ts(type = "string")]
    pub url: Url,
    // NOTE(review): renamed to `__auth_signer` and skipped in TS — presumably
    // injected by the auth middleware from the request signature; confirm.
    #[ts(skip)]
    #[serde(rename = "__auth_signer")]
    pub uploader: AnyVerifyingKey,
    /// Merkle-archive commitment (root hash + size) that `signature` covers.
    pub commitment: MerkleArchiveCommitment,
    /// Uploader's signature over `commitment` (verified in `add_package`).
    pub signature: AnySignature,
}
/// Registry-side handler: verify an uploaded s9pk against its signed
/// commitment and record it in the package index.
///
/// Steps: (1) check `signature` over `commitment` for `uploader`;
/// (2) resolve the uploader to a registered signer guid; (3) re-download and
/// deserialize the s9pk from `url`, checked against `commitment`;
/// (4) insert the version info if the uploader is an admin or is authorized
/// for this package.
pub async fn add_package(
    ctx: RegistryContext,
    AddPackageParams {
        url,
        uploader,
        commitment,
        signature,
    }: AddPackageParams,
) -> Result<(), Error> {
    // Reject before any network/DB work if the signature doesn't check out.
    uploader
        .scheme()
        .verify_commitment(&uploader, &commitment, SIG_CONTEXT, &signature)?;
    let peek = ctx.db.peek().await;
    // Errors here if the uploader's key is not a registered signer.
    let uploader_guid = peek.as_index().as_signers().get_signer(&uploader)?;
    // Fetch the archive from the claimed URL; `Some(&commitment)` pins the
    // expected merkle root, so a tampered upload fails to deserialize.
    let s9pk = S9pk::deserialize(
        &Arc::new(HttpSource::new(ctx.client.clone(), url.clone()).await?),
        Some(&commitment),
        false,
    )
    .await?;
    let manifest = s9pk.as_manifest();
    let mut info = PackageVersionInfo::from_s9pk(&s9pk, url).await?;
    // Record the uploader's signature alongside the archive's own, unless the
    // uploader is already the archive signer.
    if !info.s9pk.signatures.contains_key(&uploader) {
        info.s9pk.signatures.insert(uploader.clone(), signature);
    }
    ctx.db
        .mutate(|db| {
            // Admins may add any package; otherwise the uploader must be in the
            // package's authorized set. NOTE(review): for a brand-new package
            // `or_not_found` makes non-admins get NotFound rather than
            // UNAUTHORIZED — confirm this is the intended behavior.
            if db.as_admins().de()?.contains(&uploader_guid)
                || db
                    .as_index()
                    .as_package()
                    .as_packages()
                    .as_idx(&manifest.id)
                    .or_not_found(&manifest.id)?
                    .as_authorized()
                    .de()?
                    .contains(&uploader_guid)
            {
                let package = db
                    .as_index_mut()
                    .as_package_mut()
                    .as_packages_mut()
                    .upsert(&manifest.id, || Ok(Default::default()))?;
                package.as_versions_mut().insert(&manifest.version, &info)?;
                Ok(())
            } else {
                Err(Error::new(eyre!("UNAUTHORIZED"), ErrorKind::Authorization))
            }
        })
        .await
}
/// CLI-side parameters: the local s9pk to sign and the URL it is hosted at.
#[derive(Debug, Deserialize, Serialize, Parser)]
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
pub struct CliAddPackageParams {
    /// Path to the local .s9pk file.
    pub file: PathBuf,
    /// Publicly reachable URL where the same s9pk has been uploaded.
    pub url: Url,
}
/// CLI counterpart of `add_package`: sign the local s9pk's commitment with the
/// developer key, verify the hosted copy at `url` matches, then call the
/// registry's `add` RPC with the url/signature/commitment.
pub async fn cli_add_package(
    HandlerArgs {
        context: ctx,
        parent_method,
        method,
        params: CliAddPackageParams { file, url },
        ..
    }: HandlerArgs<CliContext, CliAddPackageParams>,
) -> Result<(), Error> {
    let s9pk = S9pk::open(&file, None, false).await?;
    let mut progress = FullProgressTracker::new();
    let progress_handle = progress.handle();
    let mut sign_phase = progress_handle.add_phase(InternedString::intern("Signing File"), Some(1));
    let mut verify_phase =
        progress_handle.add_phase(InternedString::intern("Verifying URL"), Some(100));
    let mut index_phase = progress_handle.add_phase(
        InternedString::intern("Adding File to Registry Index"),
        Some(1),
    );
    // Background task that redraws the progress bar until all phases complete.
    let progress_task: NonDetachingJoinHandle<()> = tokio::spawn(async move {
        let mut bar = PhasedProgressBar::new(&format!("Adding {} to registry...", file.display()));
        loop {
            let snap = progress.snapshot();
            bar.update(&snap);
            if snap.overall.is_complete() {
                break;
            }
            progress.changed().await
        }
    })
    .into();
    sign_phase.start();
    // Sign the merkle-archive commitment of the *local* file.
    let commitment = s9pk.as_archive().commitment().await?;
    let signature = Ed25519.sign_commitment(ctx.developer_key()?, &commitment, SIG_CONTEXT)?;
    sign_phase.complete();
    verify_phase.start();
    // Re-download from `url` pinned to the local commitment and round-trip it
    // through serialization to a sink — proves the hosted copy is byte-complete
    // before asking the registry to index it.
    let mut src = S9pk::deserialize(
        &Arc::new(HttpSource::new(ctx.client.clone(), url.clone()).await?),
        Some(&commitment),
        false,
    )
    .await?;
    src.serialize(&mut TrackingIO::new(0, tokio::io::sink()), true)
        .await?;
    verify_phase.complete();
    index_phase.start();
    ctx.call_remote::<RegistryContext>(
        &parent_method.into_iter().chain(method).join("."),
        imbl_value::json!({
            "url": &url,
            "signature": signature,
            "commitment": commitment,
        }),
    )
    .await?;
    index_phase.complete();
    progress_handle.complete();
    progress_task.await.with_kind(ErrorKind::Unknown)?;
    Ok(())
}

View File

@@ -0,0 +1,387 @@
use std::collections::{BTreeMap, BTreeSet};
use clap::{Parser, ValueEnum};
use emver::{Version, VersionRange};
use imbl_value::InternedString;
use itertools::Itertools;
use models::PackageId;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::device_info::DeviceInfo;
use crate::registry::package::index::{PackageIndex, PackageVersionInfo};
use crate::util::serde::{display_serializable, WithIoFormat};
use crate::util::VersionString;
/// How much detail to return for the non-"best" versions of a package.
///
/// Replaces the hand-written `impl Default` with `#[derive(Default)]` +
/// `#[default]` (stable since Rust 1.62) — same behavior, less code.
#[derive(
    Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS,
    ValueEnum,
)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum PackageDetailLevel {
    /// Release notes only.
    #[default]
    Short,
    /// Full version info.
    Full,
}
/// Minimal per-version info returned at `PackageDetailLevel::Short`.
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct PackageInfoShort {
    pub release_notes: String,
}
/// Query parameters for `package get`. All filters are optional; omitting
/// them broadens the result set.
#[derive(Debug, Deserialize, Serialize, TS, Parser)]
#[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")]
#[ts(export)]
pub struct GetPackageParams {
    /// Restrict to a single package; `None` queries all packages.
    pub id: Option<PackageId>,
    /// Only versions satisfying this range.
    #[ts(type = "string | null")]
    pub version: Option<VersionRange>,
    /// Only versions whose declared source range this version satisfies
    /// (used for upgrade-path queries).
    #[ts(type = "string | null")]
    pub source_version: Option<Version>,
    // NOTE(review): `__device_info` rename + skip suggests injection by
    // middleware from request headers — confirm.
    #[ts(skip)]
    #[arg(skip)]
    #[serde(rename = "__device_info")]
    pub device_info: Option<DeviceInfo>,
    /// If set, include older versions at this detail level.
    pub other_versions: Option<PackageDetailLevel>,
}
/// Response shape when `other_versions` is absent or `Short`.
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GetPackageResponse {
    #[ts(type = "string[]")]
    pub categories: BTreeSet<InternedString>,
    /// The maximal (non-dominated) matching version(s), with full info.
    pub best: BTreeMap<VersionString, PackageVersionInfo>,
    /// Older matching versions, release notes only; omitted entirely when the
    /// caller did not request other versions.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub other_versions: Option<BTreeMap<VersionString, PackageInfoShort>>,
}
impl GetPackageResponse {
    /// Render one prettytable per "best" version; beneath each, append an
    /// "OLDER VERSIONS" section listing strictly older entries from
    /// `other_versions` with their release notes.
    pub fn tables(&self) -> Vec<prettytable::Table> {
        use prettytable::*;
        self.best
            .iter()
            .map(|(version, info)| {
                let mut table = info.table(version);
                // Older = strictly less than the version this table describes.
                let older: BTreeMap<_, _> = self
                    .other_versions
                    .as_ref()
                    .into_iter()
                    .flatten()
                    .filter(|(v, _)| ***v < **version)
                    .collect();
                if !older.is_empty() {
                    table.add_row(row![bc => "OLDER VERSIONS"]);
                    table.add_row(row![bc => "VERSION", "RELEASE NOTES"]);
                    for (older_version, short) in older {
                        table.add_row(row![
                            AsRef::<str>::as_ref(older_version),
                            &short.release_notes
                        ]);
                    }
                }
                table
            })
            .collect()
    }
}
/// Response shape when `other_versions == Full`: older versions carry the
/// same full info as `best`.
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GetPackageResponseFull {
    #[ts(type = "string[]")]
    pub categories: BTreeSet<InternedString>,
    pub best: BTreeMap<VersionString, PackageVersionInfo>,
    pub other_versions: BTreeMap<VersionString, PackageVersionInfo>,
}
impl GetPackageResponseFull {
    /// Render one table per version, merging `best` and `other_versions` in
    /// version order (BTreeMap keeps them sorted by key).
    pub fn tables(&self) -> Vec<prettytable::Table> {
        let merged: BTreeMap<_, _> = self
            .best
            .iter()
            .chain(self.other_versions.iter())
            .collect();
        merged
            .into_iter()
            .map(|(version, info)| info.table(version))
            .collect()
    }
}
/// Multi-package variants: package id → per-package response.
pub type GetPackagesResponse = BTreeMap<PackageId, GetPackageResponse>;
pub type GetPackagesResponseFull = BTreeMap<PackageId, GetPackageResponseFull>;
/// Collect all `(package id, version, model)` triples in the index that pass
/// the query's filters: version range, source-version compatibility, and
/// device hardware/OS compatibility. Borrows the models from `db` (lifetime
/// `'a`) so callers deserialize lazily.
fn get_matching_models<'a>(
    db: &'a Model<PackageIndex>,
    GetPackageParams {
        id,
        version,
        source_version,
        device_info,
        ..
    }: &GetPackageParams,
) -> Result<Vec<(PackageId, Version, &'a Model<PackageVersionInfo>)>, Error> {
    // Scope: one package if `id` was given (empty if absent from the index),
    // otherwise every package in the index.
    if let Some(id) = id {
        if let Some(pkg) = db.as_packages().as_idx(id) {
            vec![(id.clone(), pkg)]
        } else {
            vec![]
        }
    } else {
        db.as_packages().as_entries()?
    }
    .iter()
    .map(|(k, v)| {
        Ok(v.as_versions()
            .as_entries()?
            .into_iter()
            .map(|(v, info)| {
                Ok::<_, Error>(
                    // Each filter defaults to "pass" when the corresponding
                    // query parameter is absent.
                    if version
                        .as_ref()
                        .map_or(true, |version| v.satisfies(version))
                        && source_version.as_ref().map_or(Ok(true), |source_version| {
                            // A version with no declared source range accepts
                            // any source version.
                            Ok::<_, Error>(
                                source_version.satisfies(
                                    &info
                                        .as_source_version()
                                        .de()?
                                        .unwrap_or(VersionRange::any()),
                                ),
                            )
                        })?
                        && device_info
                            .as_ref()
                            .map_or(Ok(true), |device_info| info.works_for_device(device_info))?
                    {
                        Some((k.clone(), Version::from(v), info))
                    } else {
                        None
                    },
                )
            })
            .flatten_ok())
    })
    .flatten_ok()
    // Collapse Result<Result<...>> nesting from the fallible inner closures.
    .map(|res| res.and_then(|a| a))
    .collect()
}
/// Handler for `package get`: partition matching versions into the "best"
/// (maximal, mutually incomparable) set and "other" (dominated) set per
/// package, then serialize the response in the shape selected by
/// `params.id` and `params.other_versions`.
pub async fn get_package(ctx: RegistryContext, params: GetPackageParams) -> Result<Value, Error> {
    use patch_db::ModelExt;
    let peek = ctx.db.peek().await;
    // Per-package maps of version → borrowed model; `best` holds maximal
    // versions, `other` holds versions displaced by a newer match.
    let mut best: BTreeMap<PackageId, BTreeMap<VersionString, &Model<PackageVersionInfo>>> =
        Default::default();
    let mut other: BTreeMap<PackageId, BTreeMap<VersionString, &Model<PackageVersionInfo>>> =
        Default::default();
    for (id, version, info) in get_matching_models(&peek.as_index().as_package(), &params)? {
        let mut package_best = best.remove(&id).unwrap_or_default();
        let mut package_other = other.remove(&id).unwrap_or_default();
        // Demote any current "best" entries strictly older than this version.
        for worse_version in package_best
            .keys()
            .filter(|k| ***k < version)
            .cloned()
            .collect_vec()
        {
            if let Some(info) = package_best.remove(&worse_version) {
                package_other.insert(worse_version, info);
            }
        }
        // Insert only if no existing best entry is strictly newer
        // (incomparable versions can coexist in `best`).
        if package_best.keys().all(|k| !(**k > version)) {
            package_best.insert(version.into(), info);
        }
        best.insert(id.clone(), package_best);
        if params.other_versions.is_some() {
            other.insert(id.clone(), package_other);
        }
    }
    if let Some(id) = params.id {
        // Single-package response.
        let categories = peek
            .as_index()
            .as_package()
            .as_packages()
            .as_idx(&id)
            .map(|p| p.as_categories().de())
            .transpose()?
            .unwrap_or_default();
        let best = best
            .remove(&id)
            .unwrap_or_default()
            .into_iter()
            .map(|(k, v)| v.de().map(|v| (k, v)))
            .try_collect()?;
        let other = other.remove(&id).unwrap_or_default();
        match params.other_versions {
            None => to_value(&GetPackageResponse {
                categories,
                best,
                other_versions: None,
            }),
            // NOTE(review): Short deserializes the full stored value into
            // PackageInfoShort via from_value — presumably relies on serde
            // ignoring unknown fields; confirm.
            Some(PackageDetailLevel::Short) => to_value(&GetPackageResponse {
                categories,
                best,
                other_versions: Some(
                    other
                        .into_iter()
                        .map(|(k, v)| from_value(v.as_value().clone()).map(|v| (k, v)))
                        .try_collect()?,
                ),
            }),
            Some(PackageDetailLevel::Full) => to_value(&GetPackageResponseFull {
                categories,
                best,
                other_versions: other
                    .into_iter()
                    .map(|(k, v)| v.de().map(|v| (k, v)))
                    .try_collect()?,
            }),
        }
    } else {
        // Multi-package response: same three shapes, mapped over every id.
        match params.other_versions {
            None => to_value(
                &best
                    .into_iter()
                    .map(|(id, best)| {
                        let categories = peek
                            .as_index()
                            .as_package()
                            .as_packages()
                            .as_idx(&id)
                            .map(|p| p.as_categories().de())
                            .transpose()?
                            .unwrap_or_default();
                        Ok::<_, Error>((
                            id,
                            GetPackageResponse {
                                categories,
                                best: best
                                    .into_iter()
                                    .map(|(k, v)| v.de().map(|v| (k, v)))
                                    .try_collect()?,
                                other_versions: None,
                            },
                        ))
                    })
                    .try_collect::<_, GetPackagesResponse, _>()?,
            ),
            Some(PackageDetailLevel::Short) => to_value(
                &best
                    .into_iter()
                    .map(|(id, best)| {
                        let categories = peek
                            .as_index()
                            .as_package()
                            .as_packages()
                            .as_idx(&id)
                            .map(|p| p.as_categories().de())
                            .transpose()?
                            .unwrap_or_default();
                        let other = other.remove(&id).unwrap_or_default();
                        Ok::<_, Error>((
                            id,
                            GetPackageResponse {
                                categories,
                                best: best
                                    .into_iter()
                                    .map(|(k, v)| v.de().map(|v| (k, v)))
                                    .try_collect()?,
                                other_versions: Some(
                                    other
                                        .into_iter()
                                        .map(|(k, v)| {
                                            from_value(v.as_value().clone()).map(|v| (k, v))
                                        })
                                        .try_collect()?,
                                ),
                            },
                        ))
                    })
                    .try_collect::<_, GetPackagesResponse, _>()?,
            ),
            Some(PackageDetailLevel::Full) => to_value(
                &best
                    .into_iter()
                    .map(|(id, best)| {
                        let categories = peek
                            .as_index()
                            .as_package()
                            .as_packages()
                            .as_idx(&id)
                            .map(|p| p.as_categories().de())
                            .transpose()?
                            .unwrap_or_default();
                        let other = other.remove(&id).unwrap_or_default();
                        Ok::<_, Error>((
                            id,
                            GetPackageResponseFull {
                                categories,
                                best: best
                                    .into_iter()
                                    .map(|(k, v)| v.de().map(|v| (k, v)))
                                    .try_collect()?,
                                other_versions: other
                                    .into_iter()
                                    .map(|(k, v)| v.de().map(|v| (k, v)))
                                    .try_collect()?,
                            },
                        ))
                    })
                    .try_collect::<_, GetPackagesResponseFull, _>()?,
            ),
        }
    }
}
/// CLI display for `package get`: honor an explicit serialization format if
/// given, otherwise deserialize `info` into the response shape implied by the
/// original request parameters and print prettytables.
///
/// Fixes: `if let Some(_) = ...` → `.is_some()` (clippy
/// redundant_pattern_matching); the repeated detail-level comparison is
/// computed once and the `else { if }` nesting is flattened.
pub fn display_package_info(
    params: WithIoFormat<GetPackageParams>,
    info: Value,
) -> Result<(), Error> {
    // Explicit --format (json/yaml/...) bypasses table rendering entirely.
    if let Some(format) = params.format {
        display_serializable(format, info);
        return Ok(());
    }
    // The response shape mirrors the request: single vs. multi package, and
    // whether full info for other versions was requested.
    let full = params.rest.other_versions == Some(PackageDetailLevel::Full);
    if params.rest.id.is_some() {
        if full {
            for table in from_value::<GetPackageResponseFull>(info)?.tables() {
                table.print_tty(false)?;
                println!();
            }
        } else {
            for table in from_value::<GetPackageResponse>(info)?.tables() {
                table.print_tty(false)?;
                println!();
            }
        }
    } else if full {
        for (_, package) in from_value::<GetPackagesResponseFull>(info)? {
            for table in package.tables() {
                table.print_tty(false)?;
                println!();
            }
        }
    } else {
        for (_, package) in from_value::<GetPackagesResponse>(info)? {
            for table in package.tables() {
                table.print_tty(false)?;
                println!();
            }
        }
    }
    Ok(())
}

View File

@@ -0,0 +1,163 @@
use std::collections::{BTreeMap, BTreeSet};
use emver::{Version, VersionRange};
use imbl_value::InternedString;
use models::{DataUrl, PackageId, VersionString};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::asset::RegistryAsset;
use crate::registry::context::RegistryContext;
use crate::registry::device_info::DeviceInfo;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey};
use crate::rpc_continuations::Guid;
use crate::s9pk::git_hash::GitHash;
use crate::s9pk::manifest::{Description, HardwareRequirements};
use crate::s9pk::merkle_archive::source::FileSource;
use crate::s9pk::S9pk;
/// Root of the registry's package index: category catalog + all packages.
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct PackageIndex {
    #[ts(as = "BTreeMap::<String, Category>")]
    pub categories: BTreeMap<InternedString, Category>,
    pub packages: BTreeMap<PackageId, PackageInfo>,
}
/// Per-package record: who may publish it, its published versions, and which
/// categories it belongs to.
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct PackageInfo {
    /// Signer guids authorized to add versions of this package.
    pub authorized: BTreeSet<Guid>,
    pub versions: BTreeMap<VersionString, PackageVersionInfo>,
    #[ts(type = "string[]")]
    pub categories: BTreeSet<InternedString>,
}
/// A browsable package category (display name + description).
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct Category {
    pub name: String,
    pub description: Description,
}
/// Everything the registry stores about one published version of a package —
/// manifest-derived display metadata plus the signed s9pk asset.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct PackageVersionInfo {
    pub title: String,
    pub icon: DataUrl<'static>,
    pub description: Description,
    pub release_notes: String,
    #[ts(type = "string")]
    pub git_hash: GitHash,
    #[ts(type = "string")]
    pub license: InternedString,
    #[ts(type = "string")]
    pub wrapper_repo: Url,
    #[ts(type = "string")]
    pub upstream_repo: Url,
    #[ts(type = "string")]
    pub support_site: Url,
    #[ts(type = "string")]
    pub marketing_site: Url,
    /// Minimum/compatible StartOS version declared by the manifest.
    pub os_version: VersionString,
    pub hardware_requirements: HardwareRequirements,
    /// Range of prior versions this version can upgrade from; `None` means
    /// unrestricted (see `get_matching_models`).
    #[ts(type = "string | null")]
    pub source_version: Option<VersionRange>,
    /// Download URL, merkle commitment, and collected signatures for the s9pk.
    pub s9pk: RegistryAsset<MerkleArchiveCommitment>,
}
impl PackageVersionInfo {
    /// Build a version record from a parsed s9pk plus the URL it is hosted at.
    /// The asset is seeded with the archive's own signer/signature; the
    /// uploader's signature may be added by the caller (see `add_package`).
    pub async fn from_s9pk<S: FileSource + Clone>(s9pk: &S9pk<S>, url: Url) -> Result<Self, Error> {
        let manifest = s9pk.as_manifest();
        Ok(Self {
            title: manifest.title.clone(),
            icon: s9pk.icon_data_url().await?,
            description: manifest.description.clone(),
            release_notes: manifest.release_notes.clone(),
            // Packages built without a git hash are rejected here.
            git_hash: manifest.git_hash.clone().or_not_found("git hash")?,
            license: manifest.license.clone(),
            wrapper_repo: manifest.wrapper_repo.clone(),
            upstream_repo: manifest.upstream_repo.clone(),
            support_site: manifest.support_site.clone(),
            marketing_site: manifest.marketing_site.clone(),
            os_version: manifest.os_version.clone(),
            hardware_requirements: manifest.hardware_requirements.clone(),
            source_version: None, // TODO
            s9pk: RegistryAsset {
                url,
                commitment: s9pk.as_archive().commitment().await?,
                signatures: [(
                    AnyVerifyingKey::Ed25519(s9pk.as_archive().signer()),
                    AnySignature::Ed25519(s9pk.as_archive().signature().await?),
                )]
                .into_iter()
                .collect(),
            },
        })
    }
    /// Render this version's metadata as a prettytable for CLI display.
    pub fn table(&self, version: &VersionString) -> prettytable::Table {
        use prettytable::*;
        let mut table = Table::new();
        table.add_row(row![bc => &self.title]);
        table.add_row(row![br -> "VERSION", AsRef::<str>::as_ref(version)]);
        table.add_row(row![br -> "RELEASE NOTES", &self.release_notes]);
        table.add_row(row![br -> "ABOUT", &self.description.short]);
        table.add_row(row![br -> "DESCRIPTION", &self.description.long]);
        table.add_row(row![br -> "GIT HASH", AsRef::<str>::as_ref(&self.git_hash)]);
        table.add_row(row![br -> "LICENSE", &self.license]);
        table.add_row(row![br -> "PACKAGE REPO", &self.wrapper_repo.to_string()]);
        table.add_row(row![br -> "SERVICE REPO", &self.upstream_repo.to_string()]);
        table.add_row(row![br -> "WEBSITE", &self.marketing_site.to_string()]);
        table.add_row(row![br -> "SUPPORT", &self.support_site.to_string()]);
        table
    }
}
impl Model<PackageVersionInfo> {
    /// Whether this package version is compatible with the given device:
    /// OS compat range, CPU architecture, minimum RAM, and required device
    /// classes (each class's regex must match at least one attached device).
    pub fn works_for_device(&self, device_info: &DeviceInfo) -> Result<bool, Error> {
        if !self.as_os_version().de()?.satisfies(&device_info.os.compat) {
            return Ok(false);
        }
        let hw = self.as_hardware_requirements().de()?;
        if let Some(arch) = hw.arch {
            if !arch.contains(&device_info.hardware.arch) {
                return Ok(false);
            }
        }
        if let Some(ram) = hw.ram {
            if device_info.hardware.ram < ram {
                return Ok(false);
            }
        }
        for (class, regex) in hw.device {
            // `map_or(false, ...)` replaces the original's borrowed
            // `unwrap_or(&Vec::new())` temporary: an absent class simply fails
            // the requirement, with no placeholder Vec needed.
            let matched = device_info
                .hardware
                .devices
                .get(&*class)
                .map_or(false, |products| {
                    products.iter().any(|product| regex.as_ref().is_match(product))
                });
            if !matched {
                return Ok(false);
            }
        }
        Ok(true)
    }
}
/// Handler for `package index`: deserialize and return the entire package
/// index from a read-only snapshot of the registry database.
pub async fn get_package_index(ctx: RegistryContext) -> Result<PackageIndex, Error> {
    let snapshot = ctx.db.peek().await;
    snapshot.into_index().into_package().de()
}

View File

@@ -0,0 +1,29 @@
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use crate::context::CliContext;
use crate::prelude::*;
use crate::util::serde::HandlerExtSerde;
pub mod add;
pub mod get;
pub mod index;
/// Assemble the `package` RPC subtree: `index`, `add`, and `get`.
pub fn package_api<C: Context>() -> ParentHandler<C> {
    ParentHandler::new()
        .subcommand(
            "index",
            from_fn_async(index::get_package_index)
                .with_display_serializable()
                .with_call_remote::<CliContext>(),
        )
        // "add" is registered twice: once as the registry-side handler
        // (`no_cli`) and once as the CLI-side implementation (`no_display`).
        // NOTE(review): presumably rpc_toolkit dispatches by context so both
        // coexist under one name (same pattern as `signers_api`) — confirm.
        .subcommand("add", from_fn_async(add::add_package).no_cli())
        .subcommand("add", from_fn_async(add::cli_add_package).no_display())
        .subcommand(
            "get",
            from_fn_async(get::get_package)
                .with_display_serializable()
                .with_custom_display_fn(|handle, result| {
                    get::display_package_info(handle.params, result)
                }),
        )
}

View File

@@ -1,477 +0,0 @@
use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::str::FromStr;
use clap::builder::ValueParserFactory;
use imbl_value::InternedString;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha512};
use tokio::io::AsyncWrite;
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{ArchiveSource, FileSource};
use crate::util::clap::FromStrParser;
use crate::util::serde::{Base64, Pem};
/// Identity of a package signer: display name, contact channels, and the set
/// of public keys this signer may sign with.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct SignerInfo {
    pub name: String,
    pub contact: Vec<ContactInfo>,
    pub keys: HashSet<SignerKey>,
}
/// A signer's public key, tagged by algorithm.
/// Serialized as `{ "alg": ..., "pubkey": ... }`.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, Hash, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
#[serde(tag = "alg", content = "pubkey")]
pub enum SignerKey {
    Ed25519(Pem<ed25519_dalek::VerifyingKey>),
}
impl SignerKey {
    /// Creates a streaming verifier for this key's algorithm.
    pub fn verifier(&self) -> Verifier {
        match self {
            Self::Ed25519(k) => Verifier::Ed25519(*k, Sha512::new()),
        }
    }
    /// One-shot verification of `signature` over `message`, with `context`
    /// as the domain-separation string.
    pub fn verify_message(
        &self,
        message: &[u8],
        signature: &[u8],
        context: &str,
    ) -> Result<(), Error> {
        let mut v = self.verifier();
        v.update(message);
        v.verify(signature, context)
    }
}
impl std::fmt::Display for SignerKey {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Ed25519(k) => write!(f, "{k}"),
        }
    }
}
/// Incremental signature verifier: feed data with `update`, then consume
/// with `verify`.
pub enum Verifier {
    Ed25519(Pem<ed25519_dalek::VerifyingKey>, Sha512),
}
impl Verifier {
    pub fn update(&mut self, data: &[u8]) {
        match self {
            Self::Ed25519(_, h) => h.update(data),
        }
    }
    /// Checks `signature` over the accumulated digest, using `context` as
    /// the Ed25519ph domain-separation string.
    pub fn verify(self, signature: &[u8], context: &str) -> Result<(), Error> {
        match self {
            Self::Ed25519(k, h) => k.verify_prehashed_strict(
                h,
                Some(context.as_bytes()),
                &ed25519_dalek::Signature::from_slice(signature)?,
            )?,
        }
        Ok(())
    }
}
/// A way to reach a signer; rendered as / parsed from a URI.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
// TODO: better types
pub enum ContactInfo {
    Email(String),
    Matrix(String),
    Website(#[ts(type = "string")] Url),
}
impl std::fmt::Display for ContactInfo {
    // Inverse of the `FromStr` impl below.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Email(e) => write!(f, "mailto:{e}"),
            Self::Matrix(m) => write!(f, "https://matrix.to/#/{m}"),
            Self::Website(w) => write!(f, "{w}"),
        }
    }
}
impl FromStr for ContactInfo {
    type Err = Error;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(if let Some(s) = s.strip_prefix("mailto:") {
            Self::Email(s.to_owned())
        } else if let Some(s) = s.strip_prefix("https://matrix.to/#/") {
            Self::Matrix(s.to_owned())
        } else {
            // Anything else must at least parse as a URL.
            Self::Website(s.parse()?)
        })
    }
}
impl ValueParserFactory for ContactInfo {
    type Parser = FromStrParser<Self>;
    fn value_parser() -> Self::Parser {
        Self::Parser::new()
    }
}
/// All signatures collected over a single file within one signing `context`.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "kebab-case")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct SignatureInfo {
    #[ts(type = "string")]
    pub context: InternedString,
    // NOTE(review): field name is misspelled ("ed255i9" for "ed25519") but is
    // part of the serialized format, so it cannot be renamed freely.
    pub blake3_ed255i9: Option<Blake3Ed2551SignatureInfo>,
}
impl SignatureInfo {
    pub fn new(context: &str) -> Self {
        Self {
            context: context.into(),
            blake3_ed255i9: None,
        }
    }
    /// Checks the stored signatures against the `accept` policy, producing a
    /// validator that pins the file's expected hash and size.
    pub fn validate(&self, accept: AcceptSigners) -> Result<FileValidator, Error> {
        FileValidator::from_signatures(self.signatures(), accept, &self.context)
    }
    /// A policy requiring ALL keys that have already signed this file.
    pub fn all_signers(&self) -> AcceptSigners {
        AcceptSigners::All(
            self.signatures()
                .map(|s| AcceptSigners::Signer(s.signer()))
                .collect(),
        )
        .flatten()
    }
    /// Iterates over the stored signatures, one `Signature` per signing key.
    pub fn signatures(&self) -> impl Iterator<Item = Signature> + '_ {
        self.blake3_ed255i9.iter().flat_map(|info| {
            info.signatures
                .iter()
                .map(|(k, s)| (k.clone(), *s))
                .map(|(pubkey, signature)| {
                    Signature::Blake3Ed25519(Blake3Ed25519Signature {
                        hash: info.hash,
                        size: info.size,
                        pubkey,
                        signature,
                    })
                })
        })
    }
    /// Verifies and records a new signature. Fails if the signature is
    /// invalid for `self.context`, or if it commits to a different file hash
    /// than the signatures already stored.
    pub fn add_sig(&mut self, signature: &Signature) -> Result<(), Error> {
        signature.validate(&self.context)?;
        match signature {
            Signature::Blake3Ed25519(s) => {
                if self
                    .blake3_ed255i9
                    .as_ref()
                    .map_or(true, |info| info.hash == s.hash)
                {
                    let new = if let Some(mut info) = self.blake3_ed255i9.take() {
                        info.signatures.insert(s.pubkey, s.signature);
                        info
                    } else {
                        s.info()
                    };
                    self.blake3_ed255i9 = Some(new);
                    Ok(())
                } else {
                    Err(Error::new(
                        eyre!("hash sum mismatch"),
                        ErrorKind::InvalidSignature,
                    ))
                }
            }
        }
    }
}
/// Policy describing which signer(s) must have signed a file. `Any`/`All`
/// combinators nest arbitrarily; `Accepted` marks a branch already satisfied
/// by a verified signature.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum AcceptSigners {
    #[serde(skip)]
    Accepted(Signature),
    Signer(SignerKey),
    Any(Vec<AcceptSigners>),
    All(Vec<AcceptSigners>),
}
impl AcceptSigners {
    // `Any([])` is never satisfiable; used as a placeholder while mutating
    // the tree in place.
    const fn null() -> Self {
        Self::Any(Vec::new())
    }
    /// Collapses single-element `Any`/`All` wrappers.
    pub fn flatten(self) -> Self {
        match self {
            Self::Any(mut s) | Self::All(mut s) if s.len() == 1 => s.swap_remove(0).flatten(),
            s => s,
        }
    }
    /// Whether this policy is fully satisfied.
    pub fn accepted(&self) -> bool {
        match self {
            Self::Accepted(_) => true,
            Self::All(s) => s.iter().all(|s| s.accepted()),
            _ => false,
        }
    }
    /// Yields the accepted signatures, re-validated against `context`, or a
    /// single error item if the policy was not satisfied.
    pub fn try_accept(
        self,
        context: &str,
    ) -> Box<dyn Iterator<Item = Result<Signature, Error>> + Send + Sync + '_> {
        match self {
            Self::Accepted(s) => Box::new(std::iter::once(s).map(|s| {
                s.validate(context)?;
                Ok(s)
            })),
            Self::All(s) => Box::new(s.into_iter().flat_map(|s| s.try_accept(context))),
            _ => Box::new(std::iter::once(Err(Error::new(
                eyre!("signer(s) not accepted"),
                ErrorKind::InvalidSignature,
            )))),
        }
    }
    /// Marks any `Signer` leaf whose key matches `sig`'s signer as
    /// `Accepted`. For `Any`, the first satisfied branch replaces the whole
    /// node.
    pub fn process_signature(&mut self, sig: &Signature) {
        let new = match std::mem::replace(self, Self::null()) {
            Self::Accepted(s) => Self::Accepted(s),
            Self::Signer(s) => {
                if s == sig.signer() {
                    Self::Accepted(sig.clone())
                } else {
                    Self::Signer(s)
                }
            }
            Self::All(mut s) => {
                s.iter_mut().for_each(|s| s.process_signature(sig));
                Self::All(s)
            }
            Self::Any(mut s) => {
                if let Some(s) = s
                    .iter_mut()
                    .map(|s| {
                        s.process_signature(sig);
                        s
                    })
                    .filter(|s| s.accepted())
                    .next()
                {
                    std::mem::replace(s, Self::null())
                } else {
                    Self::Any(s)
                }
            }
        };
        *self = new;
    }
}
/// Accumulated expectations (BLAKE3 hash + size) for a file, derived from
/// accepted signatures; used to verify downloads and local files.
#[must_use]
pub struct FileValidator {
    blake3: Option<blake3::Hash>,
    size: Option<u64>,
}
impl FileValidator {
    /// Records a hash + size pair, failing if it conflicts with values
    /// already recorded.
    fn add_blake3(&mut self, hash: [u8; 32], size: u64) -> Result<(), Error> {
        if let Some(h) = self.blake3 {
            ensure_code!(h == hash, ErrorKind::InvalidSignature, "hash sum mismatch");
        }
        self.blake3 = Some(blake3::Hash::from_bytes(hash));
        if let Some(s) = self.size {
            ensure_code!(s == size, ErrorKind::InvalidSignature, "file size mismatch");
        }
        self.size = Some(size);
        Ok(())
    }
    /// The expected BLAKE3 hash; errors if no hash-bearing signature was
    /// accepted.
    pub fn blake3(&self) -> Result<blake3::Hash, Error> {
        if let Some(hash) = self.blake3 {
            Ok(hash)
        } else {
            Err(Error::new(
                eyre!("no BLAKE3 signatures found"),
                ErrorKind::InvalidSignature,
            ))
        }
    }
    /// The expected file size; errors if no signature was accepted.
    pub fn size(&self) -> Result<u64, Error> {
        if let Some(size) = self.size {
            Ok(size)
        } else {
            Err(Error::new(
                eyre!("no signatures found"),
                ErrorKind::InvalidSignature,
            ))
        }
    }
    /// Builds a validator by feeding `signatures` through the `accept`
    /// policy, then collecting the hash/size commitments of the accepted
    /// signatures.
    pub fn from_signatures(
        signatures: impl IntoIterator<Item = Signature>,
        mut accept: AcceptSigners,
        context: &str,
    ) -> Result<Self, Error> {
        let mut res = Self {
            blake3: None,
            size: None,
        };
        for signature in signatures {
            accept.process_signature(&signature);
        }
        for signature in accept.try_accept(context) {
            match signature? {
                Signature::Blake3Ed25519(s) => res.add_blake3(*s.hash, s.size)?,
            }
        }
        Ok(res)
    }
    /// Downloads `url` into `dst`, verifying size and hash on the fly.
    pub async fn download(
        &self,
        url: Url,
        client: Client,
        dst: &mut (impl AsyncWrite + Unpin + Send + ?Sized),
    ) -> Result<(), Error> {
        let src = HttpSource::new(client, url).await?;
        let (Some(hash), Some(size)) = (self.blake3, self.size) else {
            return Err(Error::new(
                eyre!("no BLAKE3 signatures found"),
                ErrorKind::InvalidSignature,
            ));
        };
        src.section(0, size)
            .copy_verify(dst, Some((hash, size)))
            .await?;
        Ok(())
    }
    /// Verifies an on-disk file against the expected size and hash.
    pub async fn validate_file(&self, file: &MultiCursorFile) -> Result<(), Error> {
        ensure_code!(
            file.size().await == Some(self.size()?),
            ErrorKind::InvalidSignature,
            "file size mismatch"
        );
        ensure_code!(
            file.blake3_mmap().await? == self.blake3()?,
            ErrorKind::InvalidSignature,
            "hash sum mismatch"
        );
        Ok(())
    }
}
/// Aggregated BLAKE3+Ed25519 signatures over one file: the committed hash
/// and size, plus one detached signature per public key.
// NOTE(review): type/field names are misspelled ("Ed2551") but are part of
// the public/serialized surface of this (legacy) module.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct Blake3Ed2551SignatureInfo {
    pub hash: Base64<[u8; 32]>,
    pub size: u64,
    pub signatures: HashMap<Pem<ed25519_dalek::VerifyingKey>, Base64<[u8; 64]>>,
}
impl Blake3Ed2551SignatureInfo {
    /// Validates every stored signature against `context`, returning the
    /// keys whose signatures verified (fails fast on the first bad one).
    pub fn validate(&self, context: &str) -> Result<Vec<Pem<ed25519_dalek::VerifyingKey>>, Error> {
        self.signatures
            .iter()
            .map(|(k, s)| {
                let sig = Blake3Ed25519Signature {
                    hash: self.hash,
                    size: self.size,
                    pubkey: k.clone(),
                    signature: *s,
                };
                sig.validate(context)?;
                Ok(sig.pubkey)
            })
            .collect()
    }
}
/// A single detached signature, tagged by scheme.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum Signature {
    Blake3Ed25519(Blake3Ed25519Signature),
}
impl Signature {
    /// Verifies the signature within `context`.
    pub fn validate(&self, context: &str) -> Result<(), Error> {
        match self {
            Self::Blake3Ed25519(a) => a.validate(context),
        }
    }
    /// The public key that produced this signature.
    pub fn signer(&self) -> SignerKey {
        match self {
            Self::Blake3Ed25519(s) => SignerKey::Ed25519(s.pubkey.clone()),
        }
    }
}
/// One Ed25519 signature over a file's BLAKE3 hash and size.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct Blake3Ed25519Signature {
    pub hash: Base64<[u8; 32]>,
    pub size: u64,
    pub pubkey: Pem<ed25519_dalek::VerifyingKey>,
    // ed25519-sig(sha512(blake3(file) + len_u64_be(file)))
    pub signature: Base64<[u8; 64]>,
}
impl Blake3Ed25519Signature {
    /// Hashes `file` (BLAKE3 + big-endian length) and signs the digest with
    /// `key` under `context`.
    pub async fn sign_file(
        key: &ed25519_dalek::SigningKey,
        file: &MultiCursorFile,
        context: &str,
    ) -> Result<Self, Error> {
        let size = file
            .size()
            .await
            .ok_or_else(|| Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem))?;
        let hash = file.blake3_mmap().await?;
        let signature = key.sign_prehashed(
            Sha512::new_with_prefix(hash.as_bytes()).chain_update(u64::to_be_bytes(size)),
            Some(context.as_bytes()),
        )?;
        Ok(Self {
            hash: Base64(*hash.as_bytes()),
            size,
            pubkey: Pem::new(key.verifying_key()),
            signature: Base64(signature.to_bytes()),
        })
    }
    /// Verifies the stored signature over the stored hash + size.
    pub fn validate(&self, context: &str) -> Result<(), Error> {
        let sig = ed25519_dalek::Signature::from_bytes(&*self.signature);
        self.pubkey.verify_prehashed_strict(
            Sha512::new_with_prefix(*self.hash).chain_update(u64::to_be_bytes(self.size)),
            Some(context.as_bytes()),
            &sig,
        )?;
        Ok(())
    }
    /// Checks that `file` actually has the committed size and hash (does NOT
    /// verify the signature itself; see `validate`).
    pub async fn check_file(&self, file: &MultiCursorFile) -> Result<(), Error> {
        let size = file
            .size()
            .await
            .ok_or_else(|| Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem))?;
        if self.size != size {
            return Err(Error::new(
                eyre!("incorrect file size: expected {} got {}", self.size, size),
                ErrorKind::InvalidSignature,
            ));
        }
        let hash = file.blake3_mmap().await?;
        if &*self.hash != hash.as_bytes() {
            return Err(Error::new(
                eyre!("hash sum mismatch"),
                ErrorKind::InvalidSignature,
            ));
        }
        Ok(())
    }
    /// Wraps this single signature in the aggregate info structure.
    pub fn info(&self) -> Blake3Ed2551SignatureInfo {
        Blake3Ed2551SignatureInfo {
            hash: self.hash,
            size: self.size,
            signatures: [(self.pubkey, self.signature)].into_iter().collect(),
        }
    }
}

View File

@@ -0,0 +1,50 @@
use blake3::Hash;
use digest::Update;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use ts_rs::TS;
use crate::prelude::*;
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::{ParallelBlake3Writer, TrackingIO};
use crate::util::serde::Base64;
use crate::CAP_10_MiB;
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, PartialEq, Eq, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
/// Commitment to a flat byte resource: its BLAKE3 hash and exact size.
pub struct Blake3Commitment {
    pub hash: Base64<[u8; 32]>,
    #[ts(type = "number")]
    pub size: u64,
}
impl Digestable for Blake3Commitment {
    // Digest layout: 32-byte hash followed by the size as u64 big-endian.
    fn update<D: Update>(&self, digest: &mut D) {
        digest.update(&*self.hash);
        digest.update(&u64::to_be_bytes(self.size));
    }
}
impl<'a, Resource: ArchiveSource> Commitment<&'a Resource> for Blake3Commitment {
    /// Streams the whole resource through a hasher to compute hash + size.
    async fn create(resource: &'a Resource) -> Result<Self, Error> {
        let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB));
        resource.copy_all_to(&mut hasher).await?;
        Ok(Self {
            size: hasher.position(),
            hash: Base64(*hasher.into_inner().finalize().await?.as_bytes()),
        })
    }
    /// Copies exactly `self.size` bytes to `writer`, verifying the BLAKE3
    /// hash as the data flows through.
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a Resource,
        writer: W,
    ) -> Result<(), Error> {
        let mut hasher =
            VerifyingWriter::new(writer, Some((Hash::from_bytes(*self.hash), self.size)));
        resource.copy_to(0, self.size, &mut hasher).await?;
        hasher.verify().await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,98 @@
use digest::Update;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use ts_rs::TS;
use crate::prelude::*;
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::s9pk::merkle_archive::source::FileSource;
use crate::s9pk::merkle_archive::MerkleArchive;
use crate::s9pk::S9pk;
use crate::util::io::TrackingIO;
use crate::util::serde::Base64;
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct MerkleArchiveCommitment {
pub root_sighash: Base64<[u8; 32]>,
#[ts(type = "number")]
pub root_maxsize: u64,
}
impl Digestable for MerkleArchiveCommitment {
fn update<D: Update>(&self, digest: &mut D) {
digest.update(&*self.root_sighash);
digest.update(&u64::to_be_bytes(self.root_maxsize));
}
}
impl<'a, S: FileSource + Clone> Commitment<&'a MerkleArchive<S>> for MerkleArchiveCommitment {
async fn create(resource: &'a MerkleArchive<S>) -> Result<Self, Error> {
resource.commitment().await
}
async fn check(&self, resource: &'a MerkleArchive<S>) -> Result<(), Error> {
let MerkleArchiveCommitment {
root_sighash,
root_maxsize,
} = resource.commitment().await?;
if root_sighash != self.root_sighash {
return Err(Error::new(
eyre!("merkle root mismatch"),
ErrorKind::InvalidSignature,
));
}
if root_maxsize > self.root_maxsize {
return Err(Error::new(
eyre!("merkle root directory max size too large"),
ErrorKind::InvalidSignature,
));
}
Ok(())
}
async fn copy_to<W: AsyncWrite + Unpin + Send>(
&self,
resource: &'a MerkleArchive<S>,
writer: W,
) -> Result<(), Error> {
self.check(resource).await?;
resource
.serialize(&mut TrackingIO::new(0, writer), true)
.await
}
}
impl<'a, S: FileSource + Clone> Commitment<&'a S9pk<S>> for MerkleArchiveCommitment {
async fn create(resource: &'a S9pk<S>) -> Result<Self, Error> {
resource.as_archive().commitment().await
}
async fn check(&self, resource: &'a S9pk<S>) -> Result<(), Error> {
let MerkleArchiveCommitment {
root_sighash,
root_maxsize,
} = resource.as_archive().commitment().await?;
if root_sighash != self.root_sighash {
return Err(Error::new(
eyre!("merkle root mismatch"),
ErrorKind::InvalidSignature,
));
}
if root_maxsize > self.root_maxsize {
return Err(Error::new(
eyre!("merkle root directory max size too large"),
ErrorKind::InvalidSignature,
));
}
Ok(())
}
async fn copy_to<W: AsyncWrite + Unpin + Send>(
&self,
resource: &'a S9pk<S>,
writer: W,
) -> Result<(), Error> {
self.check(resource).await?;
resource
.clone()
.serialize(&mut TrackingIO::new(0, writer), true)
.await
}
}

View File

@@ -0,0 +1,25 @@
use digest::Update;
use futures::Future;
use tokio::io::AsyncWrite;
use crate::prelude::*;
pub mod blake3;
pub mod merkle_archive;
pub mod request;
/// A value that can be folded into a signing digest in a canonical way.
pub trait Digestable {
    fn update<D: Update>(&self, digest: &mut D);
}
/// A digestable summary of some resource that can be (re)computed from it,
/// checked against it, and used to copy it with verification.
pub trait Commitment<Resource>: Sized + Digestable {
    /// Computes the commitment for `resource`.
    fn create(resource: Resource) -> impl Future<Output = Result<Self, Error>> + Send;
    /// Copies `resource` to `writer`, failing if it does not match `self`.
    fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: Resource,
        writer: W,
    ) -> impl Future<Output = Result<(), Error>> + Send;
    /// Default check: copy to a sink, relying on `copy_to`'s verification.
    fn check(&self, resource: Resource) -> impl Future<Output = Result<(), Error>> + Send {
        self.copy_to(resource, tokio::io::sink())
    }
}

View File

@@ -0,0 +1,102 @@
use std::time::{SystemTime, UNIX_EPOCH};
use std::collections::BTreeMap;
use axum::body::Body;
use axum::extract::Request;
use digest::Update;
use futures::TryStreamExt;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use tokio_util::io::StreamReader;
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::signer::commitment::{Commitment, Digestable};
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::util::serde::Base64;
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, PartialEq, Eq, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct RequestCommitment {
#[ts(type = "number")]
pub timestamp: i64,
#[ts(type = "number")]
pub nonce: u64,
#[ts(type = "number")]
pub size: u64,
pub blake3: Base64<[u8; 32]>,
}
impl RequestCommitment {
pub fn append_query(&self, url: &mut Url) {
url.query_pairs_mut()
.append_pair("timestamp", &self.timestamp.to_string())
.append_pair("nonce", &self.nonce.to_string())
.append_pair("size", &self.size.to_string())
.append_pair("blake3", &self.blake3.to_string());
}
pub fn from_query(url: &Url) -> Result<Self, Error> {
let query: BTreeMap<_, _> = url.query_pairs().collect();
Ok(Self {
timestamp: query.get("timestamp").or_not_found("timestamp")?.parse()?,
nonce: query.get("nonce").or_not_found("nonce")?.parse()?,
size: query.get("size").or_not_found("size")?.parse()?,
blake3: query.get("blake3").or_not_found("blake3")?.parse()?,
})
}
}
impl Digestable for RequestCommitment {
fn update<D: Update>(&self, digest: &mut D) {
digest.update(&i64::to_be_bytes(self.timestamp));
digest.update(&u64::to_be_bytes(self.nonce));
digest.update(&u64::to_be_bytes(self.size));
digest.update(&*self.blake3);
}
}
impl<'a> Commitment<&'a mut Request> for RequestCommitment {
async fn create(resource: &'a mut Request) -> Result<Self, Error> {
use http_body_util::BodyExt;
let body = std::mem::replace(resource.body_mut(), Body::empty())
.collect()
.await
.with_kind(ErrorKind::Network)?
.to_bytes();
let res = Self {
timestamp: SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.as_secs() as i64)
.unwrap_or_else(|e| e.duration().as_secs() as i64 * -1),
nonce: rand::random(),
size: body.len() as u64,
blake3: Base64(*blake3::hash(&*body).as_bytes()),
};
*resource.body_mut() = Body::from(body);
Ok(res)
}
async fn copy_to<W: AsyncWrite + Unpin + Send>(
&self,
resource: &'a mut Request,
writer: W,
) -> Result<(), Error> {
use tokio::io::AsyncReadExt;
let mut body = StreamReader::new(
std::mem::replace(resource.body_mut(), Body::empty())
.into_data_stream()
.map_err(std::io::Error::other),
)
.take(self.size);
let mut writer = VerifyingWriter::new(
writer,
Some((blake3::Hash::from_bytes(*self.blake3), self.size)),
);
tokio::io::copy(&mut body, &mut writer).await?;
writer.verify().await?;
Ok(())
}
}

View File

@@ -0,0 +1,154 @@
use std::collections::HashSet;
use std::str::FromStr;
use clap::builder::ValueParserFactory;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::registry::signer::commitment::Digestable;
use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme};
use crate::util::clap::FromStrParser;
pub mod commitment;
pub mod sign;
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct SignerInfo {
pub name: String,
pub contact: Vec<ContactInfo>,
pub keys: HashSet<AnyVerifyingKey>,
}
/// A way to reach a signer; rendered as / parsed from a URI.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
// TODO: better types
pub enum ContactInfo {
    Email(String),
    Matrix(String),
    Website(#[ts(type = "string")] Url),
}
impl std::fmt::Display for ContactInfo {
    /// Inverse of the `FromStr` impl below.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Email(address) => write!(f, "mailto:{}", address),
            Self::Matrix(id) => write!(f, "https://matrix.to/#/{}", id),
            Self::Website(url) => write!(f, "{}", url),
        }
    }
}
impl FromStr for ContactInfo {
    type Err = Error;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Some(address) = s.strip_prefix("mailto:") {
            return Ok(Self::Email(address.to_owned()));
        }
        if let Some(id) = s.strip_prefix("https://matrix.to/#/") {
            return Ok(Self::Matrix(id.to_owned()));
        }
        // Anything else must at least parse as a URL.
        Ok(Self::Website(s.parse()?))
    }
}
impl ValueParserFactory for ContactInfo {
    type Parser = FromStrParser<Self>;
    fn value_parser() -> Self::Parser {
        Self::Parser::new()
    }
}
/// Policy describing which signer(s) must have signed something. `Any`/`All`
/// combinators nest arbitrarily; `Accepted` marks a branch satisfied by a
/// verified signature.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum AcceptSigners {
    #[serde(skip)]
    Accepted,
    Signer(AnyVerifyingKey),
    Any(Vec<AcceptSigners>),
    All(Vec<AcceptSigners>),
}
impl AcceptSigners {
    // `Any([])` is never satisfiable; used as a placeholder while mutating
    // the tree in place.
    const fn null() -> Self {
        Self::Any(Vec::new())
    }
    /// Collapses single-element `Any`/`All` wrappers.
    pub fn flatten(self) -> Self {
        match self {
            Self::Any(mut s) | Self::All(mut s) if s.len() == 1 => s.swap_remove(0).flatten(),
            s => s,
        }
    }
    /// Whether this policy is fully satisfied.
    pub fn accepted(&self) -> bool {
        match self {
            Self::Accepted => true,
            _ => false,
        }
    }
    /// Errors unless the policy has been satisfied by prior
    /// `process_signature` calls.
    pub fn try_accept(self) -> Result<(), Error> {
        if self.accepted() {
            Ok(())
        } else {
            Err(Error::new(
                eyre!("signer(s) not accepted"),
                ErrorKind::InvalidSignature,
            ))
        }
    }
    /// Verifies `signature` by `signer` over `commitment` and marks matching
    /// `Signer` leaves as `Accepted`, collapsing satisfied `All`/`Any`
    /// nodes. Returns the first verification error encountered (the tree may
    /// still have been partially updated).
    pub fn process_signature(
        &mut self,
        signer: &AnyVerifyingKey,
        commitment: &impl Digestable,
        context: &str,
        signature: &AnySignature,
    ) -> Result<(), Error> {
        let mut res = Ok(());
        let new = match std::mem::replace(self, Self::null()) {
            Self::Accepted => Self::Accepted,
            Self::Signer(s) => {
                if &s == signer {
                    // Only verify the signature when the key is relevant to
                    // this policy.
                    res = signer
                        .scheme()
                        .verify_commitment(signer, commitment, context, signature);
                    Self::Accepted
                } else {
                    Self::Signer(s)
                }
            }
            Self::All(mut s) => {
                res = s
                    .iter_mut()
                    .map(|s| s.process_signature(signer, commitment, context, signature))
                    .collect();
                if s.iter().all(|s| s.accepted()) {
                    Self::Accepted
                } else {
                    Self::All(s)
                }
            }
            Self::Any(mut s) => {
                match s
                    .iter_mut()
                    .map(|s| {
                        s.process_signature(signer, commitment, context, signature)?;
                        Ok(s)
                    })
                    .filter_ok(|s| s.accepted())
                    .next()
                {
                    // First satisfied branch replaces the whole `Any` node.
                    Some(Ok(s)) => std::mem::replace(s, Self::null()),
                    Some(Err(e)) => {
                        res = Err(e);
                        Self::Any(s)
                    }
                    None => Self::Any(s),
                }
            }
        };
        *self = new;
        res
    }
}

View File

@@ -0,0 +1,34 @@
use ed25519_dalek::{Signature, SigningKey, VerifyingKey};
use sha2::Sha512;
use crate::prelude::*;
use crate::registry::signer::sign::SignatureScheme;
/// The Ed25519ph signature scheme (pre-hashed with SHA-512, with a
/// domain-separation context string), backed by `ed25519_dalek`.
pub struct Ed25519;
impl SignatureScheme for Ed25519 {
    type SigningKey = SigningKey;
    type VerifyingKey = VerifyingKey;
    type Signature = Signature;
    type Digest = Sha512;
    fn new_digest(&self) -> Self::Digest {
        <Self::Digest as digest::Digest>::new()
    }
    /// Signs a pre-hashed digest under `context`.
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        Ok(key.sign_prehashed(digest, Some(context.as_bytes()))?)
    }
    /// Verifies a pre-hashed digest under `context` (strict verification).
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        key.verify_prehashed_strict(digest, Some(context.as_bytes()), signature)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,348 @@
use std::fmt::Display;
use std::str::FromStr;
use ::ed25519::pkcs8::BitStringRef;
use clap::builder::ValueParserFactory;
use der::referenced::OwnedToRef;
use der::{Decode, Encode};
use pkcs8::der::AnyRef;
use pkcs8::{PrivateKeyInfo, SubjectPublicKeyInfo};
use serde::{Deserialize, Serialize};
use sha2::Sha512;
use ts_rs::TS;
use crate::prelude::*;
use crate::registry::signer::commitment::Digestable;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::util::clap::FromStrParser;
use crate::util::serde::{deserialize_from_str, serialize_display};
pub mod ed25519;
/// A digital signature scheme: key/signature types plus sign/verify over an
/// incrementally-built digest, with a domain-separation `context` string.
pub trait SignatureScheme {
    type SigningKey;
    type VerifyingKey;
    type Signature;
    type Digest: digest::Update;
    /// Fresh, empty digest for this scheme.
    fn new_digest(&self) -> Self::Digest;
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error>;
    /// Convenience: digest `commitment` canonically, then sign.
    fn sign_commitment<C: Digestable>(
        &self,
        key: &Self::SigningKey,
        commitment: &C,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        let mut digest = self.new_digest();
        commitment.update(&mut digest);
        self.sign(key, digest, context)
    }
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error>;
    /// Convenience: digest `commitment` canonically, then verify.
    fn verify_commitment<C: Digestable>(
        &self,
        key: &Self::VerifyingKey,
        commitment: &C,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        let mut digest = self.new_digest();
        commitment.update(&mut digest);
        self.verify(key, digest, context, signature)
    }
}
/// Runtime-tagged union of the supported signature schemes.
pub enum AnyScheme {
    Ed25519(Ed25519),
}
impl From<Ed25519> for AnyScheme {
    fn from(value: Ed25519) -> Self {
        Self::Ed25519(value)
    }
}
impl SignatureScheme for AnyScheme {
    type SigningKey = AnySigningKey;
    type VerifyingKey = AnyVerifyingKey;
    type Signature = AnySignature;
    type Digest = AnyDigest;
    fn new_digest(&self) -> Self::Digest {
        match self {
            Self::Ed25519(s) => AnyDigest::Sha512(s.new_digest()),
        }
    }
    /// Signs if key/digest variants match this scheme; otherwise errors.
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        match (self, key, digest) {
            (Self::Ed25519(s), AnySigningKey::Ed25519(key), AnyDigest::Sha512(digest)) => {
                Ok(AnySignature::Ed25519(s.sign(key, digest, context)?))
            }
            _ => Err(Error::new(
                eyre!("mismatched signature algorithm"),
                ErrorKind::InvalidSignature,
            )),
        }
    }
    /// Verifies if key/digest/signature variants all match this scheme;
    /// otherwise errors.
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        match (self, key, digest, signature) {
            (
                Self::Ed25519(s),
                AnyVerifyingKey::Ed25519(key),
                AnyDigest::Sha512(digest),
                AnySignature::Ed25519(signature),
            ) => s.verify(key, digest, context, signature),
            _ => Err(Error::new(
                eyre!("mismatched signature algorithm"),
                ErrorKind::InvalidSignature,
            )),
        }
    }
}
/// Algorithm-tagged signing (private) key; serialized as PKCS#8 PEM.
#[derive(Clone, Debug, PartialEq, Eq, TS)]
#[ts(export, type = "string")]
pub enum AnySigningKey {
    Ed25519(<Ed25519 as SignatureScheme>::SigningKey),
}
impl AnySigningKey {
    /// The scheme this key belongs to.
    pub fn scheme(&self) -> AnyScheme {
        match self {
            Self::Ed25519(_) => AnyScheme::Ed25519(Ed25519),
        }
    }
    /// Derives the corresponding public key.
    pub fn verifying_key(&self) -> AnyVerifyingKey {
        match self {
            Self::Ed25519(k) => AnyVerifyingKey::Ed25519(k.into()),
        }
    }
}
impl<'a> TryFrom<PrivateKeyInfo<'a>> for AnySigningKey {
    type Error = pkcs8::Error;
    // Dispatch on the PKCS#8 algorithm OID; unknown OIDs are rejected.
    fn try_from(value: PrivateKeyInfo<'a>) -> Result<Self, Self::Error> {
        if value.algorithm == ed25519_dalek::pkcs8::ALGORITHM_ID {
            Ok(Self::Ed25519(ed25519_dalek::SigningKey::try_from(value)?))
        } else {
            Err(pkcs8::spki::Error::OidUnknown {
                oid: value.algorithm.oid,
            }
            .into())
        }
    }
}
impl pkcs8::EncodePrivateKey for AnySigningKey {
    fn to_pkcs8_der(&self) -> pkcs8::Result<pkcs8::SecretDocument> {
        match self {
            Self::Ed25519(s) => s.to_pkcs8_der(),
        }
    }
}
impl FromStr for AnySigningKey {
    type Err = Error;
    // Parses from PKCS#8 PEM text.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use pkcs8::DecodePrivateKey;
        Self::from_pkcs8_pem(s).with_kind(ErrorKind::Deserialization)
    }
}
impl Display for AnySigningKey {
    // Renders as PKCS#8 PEM text (inverse of `FromStr`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use pkcs8::EncodePrivateKey;
        f.write_str(
            &self
                .to_pkcs8_pem(pkcs8::LineEnding::LF)
                .map_err(|_| std::fmt::Error)?,
        )
    }
}
impl<'de> Deserialize<'de> for AnySigningKey {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
impl Serialize for AnySigningKey {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}
/// Algorithm-tagged verifying (public) key; serialized as SPKI PEM.
#[derive(Clone, Debug, PartialEq, Eq, Hash, TS)]
#[ts(export, type = "string")]
pub enum AnyVerifyingKey {
    Ed25519(<Ed25519 as SignatureScheme>::VerifyingKey),
}
impl AnyVerifyingKey {
    /// The scheme this key belongs to.
    pub fn scheme(&self) -> AnyScheme {
        match self {
            Self::Ed25519(_) => AnyScheme::Ed25519(Ed25519),
        }
    }
}
impl<'a> TryFrom<SubjectPublicKeyInfo<AnyRef<'a>, BitStringRef<'a>>> for AnyVerifyingKey {
    type Error = pkcs8::spki::Error;
    // Dispatch on the SPKI algorithm OID; unknown OIDs are rejected.
    fn try_from(
        value: SubjectPublicKeyInfo<AnyRef<'a>, BitStringRef<'a>>,
    ) -> Result<Self, Self::Error> {
        if value.algorithm == ed25519_dalek::pkcs8::ALGORITHM_ID {
            Ok(Self::Ed25519(ed25519_dalek::VerifyingKey::try_from(value)?))
        } else {
            Err(pkcs8::spki::Error::OidUnknown {
                oid: value.algorithm.oid,
            })
        }
    }
}
impl pkcs8::EncodePublicKey for AnyVerifyingKey {
    fn to_public_key_der(&self) -> pkcs8::spki::Result<pkcs8::Document> {
        match self {
            Self::Ed25519(s) => s.to_public_key_der(),
        }
    }
}
impl FromStr for AnyVerifyingKey {
    type Err = Error;
    // Parses from SPKI PEM text.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use pkcs8::DecodePublicKey;
        Self::from_public_key_pem(s).with_kind(ErrorKind::Deserialization)
    }
}
impl Display for AnyVerifyingKey {
    // Renders as SPKI PEM text (inverse of `FromStr`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use pkcs8::EncodePublicKey;
        f.write_str(
            &self
                .to_public_key_pem(pkcs8::LineEnding::LF)
                .map_err(|_| std::fmt::Error)?,
        )
    }
}
impl<'de> Deserialize<'de> for AnyVerifyingKey {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
impl Serialize for AnyVerifyingKey {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}
impl ValueParserFactory for AnyVerifyingKey {
    type Parser = FromStrParser<Self>;
    fn value_parser() -> Self::Parser {
        Self::Parser::new()
    }
}
/// Algorithm-tagged digest state matching `AnyScheme`.
#[derive(Clone, Debug)]
pub enum AnyDigest {
    Sha512(Sha512),
}
impl digest::Update for AnyDigest {
    fn update(&mut self, data: &[u8]) {
        match self {
            Self::Sha512(d) => digest::Update::update(d, data),
        }
    }
}
/// Algorithm-tagged signature; serialized as a custom PEM "SIGNATURE" block
/// containing a DER sequence of (algorithm identifier, signature octets).
#[derive(Clone, Debug, PartialEq, Eq, TS)]
#[ts(export, type = "string")]
pub enum AnySignature {
    Ed25519(<Ed25519 as SignatureScheme>::Signature),
}
impl FromStr for AnySignature {
    type Err = Error;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use der::DecodePem;
        // Local DER shape; must stay in sync with the `Display` impl below.
        #[derive(der::Sequence)]
        struct AnySignatureDer {
            alg: pkcs8::spki::AlgorithmIdentifierOwned,
            sig: der::asn1::OctetString,
        }
        impl der::pem::PemLabel for AnySignatureDer {
            const PEM_LABEL: &'static str = "SIGNATURE";
        }
        let der = AnySignatureDer::from_pem(s.as_bytes()).with_kind(ErrorKind::Deserialization)?;
        // Both the OID and its parameters must match Ed25519's algorithm id.
        if der.alg.oid == ed25519_dalek::pkcs8::ALGORITHM_ID.oid
            && der.alg.parameters.owned_to_ref() == ed25519_dalek::pkcs8::ALGORITHM_ID.parameters
        {
            Ok(Self::Ed25519(
                ed25519_dalek::Signature::from_slice(der.sig.as_bytes())
                    .with_kind(ErrorKind::Deserialization)?,
            ))
        } else {
            Err(pkcs8::spki::Error::OidUnknown { oid: der.alg.oid })
                .with_kind(ErrorKind::Deserialization)
        }
    }
}
impl Display for AnySignature {
    // Renders as a PEM "SIGNATURE" block (inverse of `FromStr`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use der::EncodePem;
        #[derive(der::Sequence)]
        struct AnySignatureDer<'a> {
            alg: pkcs8::AlgorithmIdentifierRef<'a>,
            sig: der::asn1::OctetString,
        }
        impl<'a> der::pem::PemLabel for AnySignatureDer<'a> {
            const PEM_LABEL: &'static str = "SIGNATURE";
        }
        f.write_str(
            &match self {
                Self::Ed25519(s) => AnySignatureDer {
                    alg: ed25519_dalek::pkcs8::ALGORITHM_ID,
                    sig: der::asn1::OctetString::new(s.to_bytes()).map_err(|_| std::fmt::Error)?,
                },
            }
            .to_pem(der::pem::LineEnding::LF)
            .map_err(|_| std::fmt::Error)?,
        )
    }
}
impl<'de> Deserialize<'de> for AnySignature {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
impl Serialize for AnySignature {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}