Refactor/project structure (#3085)

* refactor project structure

* environment-based default registry

* fix tests

* update build container

* use docker platform for iso build emulation

* simplify compat

* Fix docker platform spec in run-compat.sh

* handle riscv compat

* fix bug with dep error exists attr

* undo removal of sorting

* use qemu for iso stage

---------

Co-authored-by: Mariusz Kogen <k0gen@pm.me>
Co-authored-by: Matt Hill <mattnine@protonmail.com>
This commit is contained in:
Aiden McClelland
2025-12-22 13:39:38 -07:00
committed by GitHub
parent eda08d5b0f
commit 96ae532879
389 changed files with 744 additions and 4005 deletions

View File

@@ -0,0 +1,50 @@
use blake3::Hash;
use digest::Update;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use ts_rs::TS;
use crate::CAP_10_MiB;
use crate::prelude::*;
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::sign::commitment::{Commitment, Digestable};
use crate::util::io::{ParallelBlake3Writer, TrackingIO};
use crate::util::serde::Base64;
/// Commitment to a resource's full contents: its BLAKE3 hash plus its exact
/// byte length.
///
/// Serialized with camelCase field names and exported to TypeScript via ts-rs.
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, PartialEq, Eq, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct Blake3Commitment {
    /// BLAKE3 hash of the resource's bytes (base64 in serialized form).
    pub hash: Base64<[u8; 32]>,
    /// Resource length in bytes (serialized as a JS `number`).
    #[ts(type = "number")]
    pub size: u64,
}
impl Digestable for Blake3Commitment {
    /// Feed this commitment into `digest`: the 32 raw hash bytes first, then
    /// the size as big-endian `u64` bytes. The order is part of the signed
    /// encoding and must not change.
    fn update<D: Update>(&self, digest: &mut D) {
        let size_be = self.size.to_be_bytes();
        digest.update(&*self.hash);
        digest.update(&size_be);
    }
}
impl<'a, Resource: ArchiveSource> Commitment<&'a Resource> for Blake3Commitment {
    /// Hash the entire resource while counting the bytes consumed, producing
    /// a commitment to both the digest and the length.
    async fn create(resource: &'a Resource) -> Result<Self, Error> {
        let mut counting_hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB));
        resource.copy_all_to(&mut counting_hasher).await?;
        let size = counting_hasher.position();
        let hash = counting_hasher.into_inner().finalize().await?;
        Ok(Self {
            hash: Base64(*hash.as_bytes()),
            size,
        })
    }
    /// Stream exactly `self.size` bytes of the resource into `writer`,
    /// verifying on the fly that they hash to `self.hash`.
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a Resource,
        writer: W,
    ) -> Result<(), Error> {
        let expected = (Hash::from_bytes(*self.hash), self.size);
        let mut verifier = VerifyingWriter::new(writer, Some(expected));
        resource.copy_to(0, self.size, &mut verifier).await?;
        verifier.verify().await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,127 @@
use digest::Update;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use ts_rs::TS;
use crate::prelude::*;
use crate::s9pk::S9pk;
use crate::s9pk::merkle_archive::MerkleArchive;
use crate::s9pk::merkle_archive::source::FileSource;
use crate::sign::commitment::{Commitment, Digestable};
use crate::util::io::TrackingIO;
use crate::util::serde::Base64;
/// Commitment to the root of a merkle archive: the signed hash of the root
/// plus an upper bound on the root directory's serialized size.
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct MerkleArchiveCommitment {
    /// Signed hash of the archive root (base64 in serialized form).
    pub root_sighash: Base64<[u8; 32]>,
    /// Maximum allowed size of the root directory (serialized as a JS `number`).
    #[ts(type = "number")]
    pub root_maxsize: u64,
}
impl MerkleArchiveCommitment {
    /// Parse an optional commitment out of a URL query string.
    ///
    /// Returns `Ok(None)` when neither `rootSighash` nor `rootMaxsize` is
    /// present. Supplying only one of the pair is an `InvalidRequest` error,
    /// as is a value that fails to parse.
    pub fn from_query(query: &str) -> Result<Option<Self>, Error> {
        let mut sighash = None;
        let mut maxsize = None;
        for (key, value) in form_urlencoded::parse(query.as_bytes()) {
            match &*key {
                "rootSighash" => sighash = Some(value.parse()?),
                "rootMaxsize" => maxsize = Some(value.parse()?),
                _ => (),
            }
        }
        if sighash.is_none() && maxsize.is_none() {
            return Ok(None);
        }
        Ok(Some(Self {
            root_sighash: sighash
                .or_not_found("rootSighash required if rootMaxsize specified")
                .with_kind(ErrorKind::InvalidRequest)?,
            root_maxsize: maxsize
                .or_not_found("rootMaxsize required if rootSighash specified")
                .with_kind(ErrorKind::InvalidRequest)?,
        }))
    }
}
impl Digestable for MerkleArchiveCommitment {
    /// Feed this commitment into `digest`: the 32 sighash bytes first, then
    /// the max size as big-endian `u64` bytes. The order is part of the
    /// signed encoding and must not change.
    fn update<D: Update>(&self, digest: &mut D) {
        let maxsize_be = self.root_maxsize.to_be_bytes();
        digest.update(&*self.root_sighash);
        digest.update(&maxsize_be);
    }
}
impl<'a, S: FileSource + Clone> Commitment<&'a MerkleArchive<S>> for MerkleArchiveCommitment {
    /// Compute the archive's current root commitment.
    async fn create(resource: &'a MerkleArchive<S>) -> Result<Self, Error> {
        resource.commitment().await
    }
    /// Verify `resource` against this commitment: the root sighash must match
    /// exactly, and the archive's actual root maxsize must not exceed the
    /// committed bound.
    async fn check(&self, resource: &'a MerkleArchive<S>) -> Result<(), Error> {
        let actual = resource.commitment().await?;
        if actual.root_sighash != self.root_sighash {
            return Err(Error::new(
                eyre!("merkle root mismatch"),
                ErrorKind::InvalidSignature,
            ));
        }
        if actual.root_maxsize > self.root_maxsize {
            return Err(Error::new(
                eyre!("merkle root directory max size too large"),
                ErrorKind::InvalidSignature,
            ));
        }
        Ok(())
    }
    /// Check the archive against this commitment, then serialize it in full
    /// into `writer`.
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a MerkleArchive<S>,
        writer: W,
    ) -> Result<(), Error> {
        self.check(resource).await?;
        resource
            .serialize(&mut TrackingIO::new(0, writer), true)
            .await
    }
}
// NOTE(review): this duplicates the `Commitment<&'a MerkleArchive<S>>` impl
// above via `resource.as_archive()` — consider delegating to it to keep the
// two verification paths from drifting apart.
impl<'a, S: FileSource + Clone> Commitment<&'a S9pk<S>> for MerkleArchiveCommitment {
    /// Compute the commitment of the s9pk's underlying merkle archive.
    async fn create(resource: &'a S9pk<S>) -> Result<Self, Error> {
        resource.as_archive().commitment().await
    }
    /// Verify the s9pk's archive against this commitment: the root sighash
    /// must match exactly, and the actual root maxsize must not exceed the
    /// committed bound.
    async fn check(&self, resource: &'a S9pk<S>) -> Result<(), Error> {
        let MerkleArchiveCommitment {
            root_sighash,
            root_maxsize,
        } = resource.as_archive().commitment().await?;
        if root_sighash != self.root_sighash {
            return Err(Error::new(
                eyre!("merkle root mismatch"),
                ErrorKind::InvalidSignature,
            ));
        }
        if root_maxsize > self.root_maxsize {
            return Err(Error::new(
                eyre!("merkle root directory max size too large"),
                ErrorKind::InvalidSignature,
            ));
        }
        Ok(())
    }
    /// Check the s9pk against this commitment, then serialize it in full into
    /// `writer`. The clone is of the (cheap) handle, not the archive data —
    /// TODO confirm `S9pk::serialize` requires ownership.
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a S9pk<S>,
        writer: W,
    ) -> Result<(), Error> {
        self.check(resource).await?;
        resource
            .clone()
            .serialize(&mut TrackingIO::new(0, writer), true)
            .await
    }
}

View File

@@ -0,0 +1,25 @@
use digest::Update;
use futures::Future;
use tokio::io::AsyncWrite;
use crate::prelude::*;
pub mod blake3;
pub mod merkle_archive;
pub mod request;
/// A value with a canonical byte encoding that can be fed into a digest for
/// signing or verification.
pub trait Digestable {
    /// Feed the canonical encoding of `self` into `digest`.
    fn update<D: Update>(&self, digest: &mut D);
}
/// A verifiable commitment to the contents of some `Resource`.
///
/// [`Commitment::create`] computes the commitment from a resource;
/// [`Commitment::copy_to`] streams the resource out while verifying it still
/// matches; [`Commitment::check`] verifies without producing output.
pub trait Commitment<Resource>: Sized + Digestable {
    /// Compute a commitment from `resource`.
    fn create(resource: Resource) -> impl Future<Output = Result<Self, Error>> + Send;
    /// Stream `resource` into `writer`, erroring if it no longer matches
    /// this commitment.
    fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: Resource,
        writer: W,
    ) -> impl Future<Output = Result<(), Error>> + Send;
    /// Verify `resource` against this commitment, discarding the streamed
    /// bytes. Default: `copy_to` into `tokio::io::sink()`.
    fn check(&self, resource: Resource) -> impl Future<Output = Result<(), Error>> + Send {
        self.copy_to(resource, tokio::io::sink())
    }
}

View File

@@ -0,0 +1,103 @@
use std::collections::BTreeMap;
use std::time::{SystemTime, UNIX_EPOCH};
use axum::body::Body;
use axum::extract::Request;
use digest::Update;
use futures::TryStreamExt;
use http::HeaderValue;
use serde::{Deserialize, Serialize};
use tokio::io::AsyncWrite;
use tokio_util::io::StreamReader;
use ts_rs::TS;
use url::Url;
use crate::prelude::*;
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::sign::commitment::{Commitment, Digestable};
use crate::util::serde::Base64;
/// Commitment to an HTTP request body, with a timestamp and nonce so a
/// signature over it cannot be replayed.
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, PartialEq, Eq, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct RequestCommitment {
    /// Unix timestamp (seconds) when the commitment was created; negative if
    /// the system clock reads earlier than the epoch.
    #[ts(type = "number")]
    pub timestamp: i64,
    /// Random value to make each commitment unique.
    #[ts(type = "number")]
    pub nonce: u64,
    /// Body length in bytes.
    #[ts(type = "number")]
    pub size: u64,
    /// BLAKE3 hash of the body bytes.
    pub blake3: Base64<[u8; 32]>,
}
impl RequestCommitment {
    /// Append this commitment's fields to `url` as query parameters
    /// (`timestamp`, `nonce`, `size`, `blake3`).
    pub fn append_query(&self, url: &mut Url) {
        let mut pairs = url.query_pairs_mut();
        pairs.append_pair("timestamp", &self.timestamp.to_string());
        pairs.append_pair("nonce", &self.nonce.to_string());
        pairs.append_pair("size", &self.size.to_string());
        pairs.append_pair("blake3", &self.blake3.to_string());
    }
    /// Reconstruct a commitment from a header value holding the urlencoded
    /// parameters written by [`Self::append_query`]. Errors if any field is
    /// missing or fails to parse.
    pub fn from_query(query: &HeaderValue) -> Result<Self, Error> {
        let params: BTreeMap<_, _> = form_urlencoded::parse(query.as_bytes()).collect();
        let timestamp = params.get("timestamp").or_not_found("timestamp")?.parse()?;
        let nonce = params.get("nonce").or_not_found("nonce")?.parse()?;
        let size = params.get("size").or_not_found("size")?.parse()?;
        let blake3 = params.get("blake3").or_not_found("blake3")?.parse()?;
        Ok(Self {
            timestamp,
            nonce,
            size,
            blake3,
        })
    }
}
impl Digestable for RequestCommitment {
    /// Feed this commitment into `digest` in the fixed order
    /// timestamp / nonce / size / hash; the order is part of the signed
    /// encoding and must not change.
    fn update<D: Update>(&self, digest: &mut D) {
        digest.update(&self.timestamp.to_be_bytes());
        digest.update(&self.nonce.to_be_bytes());
        digest.update(&self.size.to_be_bytes());
        digest.update(&*self.blake3);
    }
}
impl<'a> Commitment<&'a mut Request> for RequestCommitment {
    /// Buffer the request body, record its length and BLAKE3 hash together
    /// with the current unix timestamp and a random nonce, then restore the
    /// body onto the request so it can still be sent.
    async fn create(resource: &'a mut Request) -> Result<Self, Error> {
        use http_body_util::BodyExt;
        let body = std::mem::replace(resource.body_mut(), Body::empty())
            .collect()
            .await
            .with_kind(ErrorKind::Network)?
            .to_bytes();
        let res = Self {
            // Seconds since the unix epoch; negated (idiomatic unary `-`
            // rather than `* -1`) when the clock reads pre-epoch.
            timestamp: SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .map(|d| d.as_secs() as i64)
                .unwrap_or_else(|e| -(e.duration().as_secs() as i64)),
            nonce: rand::random(),
            size: body.len() as u64,
            blake3: Base64(*blake3::hash(&*body).as_bytes()),
        };
        *resource.body_mut() = Body::from(body);
        Ok(res)
    }
    /// Stream the request body into `writer`, verifying that exactly
    /// `self.size` bytes arrive and that they hash to `self.blake3`.
    /// Note: this consumes the request body.
    async fn copy_to<W: AsyncWrite + Unpin + Send>(
        &self,
        resource: &'a mut Request,
        writer: W,
    ) -> Result<(), Error> {
        use tokio::io::AsyncReadExt;
        // Cap the read at the committed size so an oversized body fails
        // verification instead of streaming unbounded data.
        let mut body = StreamReader::new(
            std::mem::replace(resource.body_mut(), Body::empty())
                .into_data_stream()
                .map_err(std::io::Error::other),
        )
        .take(self.size);
        let mut writer = VerifyingWriter::new(
            writer,
            Some((blake3::Hash::from_bytes(*self.blake3), self.size)),
        );
        tokio::io::copy(&mut body, &mut writer).await?;
        writer.verify().await?;
        Ok(())
    }
}

34
core/src/sign/ed25519.rs Normal file
View File

@@ -0,0 +1,34 @@
use ed25519_dalek::{Signature, SigningKey, VerifyingKey};
use sha2::Sha512;
use crate::prelude::*;
use crate::sign::SignatureScheme;
/// Marker type implementing [`SignatureScheme`] for Ed25519 with SHA-512
/// prehashing (the `sign_prehashed`/`verify_prehashed_strict` flow).
pub struct Ed25519;
impl SignatureScheme for Ed25519 {
    type SigningKey = SigningKey;
    type VerifyingKey = VerifyingKey;
    type Signature = Signature;
    // Prehashed signing: the message is hashed with SHA-512 before signing.
    type Digest = Sha512;
    /// Start a fresh SHA-512 digest for prehashing.
    fn new_digest(&self) -> Self::Digest {
        <Self::Digest as digest::Digest>::new()
    }
    /// Sign the prehashed `digest` with `key`, binding `context` bytes into
    /// the signature for domain separation.
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        Ok(key.sign_prehashed(digest, Some(context.as_bytes()))?)
    }
    /// Verify a prehashed signature under the same `context` used to sign;
    /// uses the strict verification variant.
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        key.verify_prehashed_strict(digest, Some(context.as_bytes()), signature)?;
        Ok(())
    }
}

352
core/src/sign/mod.rs Normal file
View File

@@ -0,0 +1,352 @@
use std::fmt::Display;
use std::str::FromStr;
use ::ed25519::pkcs8::BitStringRef;
use clap::builder::ValueParserFactory;
use der::referenced::OwnedToRef;
use pkcs8::der::AnyRef;
use pkcs8::{PrivateKeyInfo, SubjectPublicKeyInfo};
use serde::{Deserialize, Serialize};
use sha2::Sha512;
use ts_rs::TS;
use crate::prelude::*;
use crate::sign::commitment::Digestable;
use crate::sign::ed25519::Ed25519;
use crate::util::FromStrParser;
use crate::util::serde::{deserialize_from_str, serialize_display};
pub mod commitment;
pub mod ed25519;
/// A digital signature scheme: key types, signature type, and the digest
/// used to prehash messages before signing.
pub trait SignatureScheme {
    type SigningKey;
    type VerifyingKey;
    type Signature;
    /// Digest used to prehash the message/commitment before signing.
    type Digest: digest::Update;
    /// Start a fresh digest instance for this scheme.
    fn new_digest(&self) -> Self::Digest;
    /// Sign an already-computed `digest` under the given domain-separation
    /// `context`.
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error>;
    /// Convenience: digest `commitment` with [`Self::new_digest`] and sign it.
    fn sign_commitment<C: Digestable>(
        &self,
        key: &Self::SigningKey,
        commitment: &C,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        let mut digest = self.new_digest();
        commitment.update(&mut digest);
        self.sign(key, digest, context)
    }
    /// Verify `signature` over `digest` under the same `context` used to sign.
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error>;
    /// Convenience: digest `commitment` and verify the signature over it.
    fn verify_commitment<C: Digestable>(
        &self,
        key: &Self::VerifyingKey,
        commitment: &C,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        let mut digest = self.new_digest();
        commitment.update(&mut digest);
        self.verify(key, digest, context, signature)
    }
}
/// Scheme-agnostic wrapper over every supported [`SignatureScheme`]
/// (currently only Ed25519); non-exhaustive so new schemes can be added.
#[non_exhaustive]
pub enum AnyScheme {
    Ed25519(Ed25519),
}
impl From<Ed25519> for AnyScheme {
fn from(value: Ed25519) -> Self {
Self::Ed25519(value)
}
}
impl SignatureScheme for AnyScheme {
    type SigningKey = AnySigningKey;
    type VerifyingKey = AnyVerifyingKey;
    type Signature = AnySignature;
    type Digest = AnyDigest;
    /// Start a digest of the variant matching the selected scheme.
    fn new_digest(&self) -> Self::Digest {
        match self {
            Self::Ed25519(s) => AnyDigest::Sha512(s.new_digest()),
        }
    }
    /// Dispatch to the underlying scheme; the key, digest, and scheme
    /// variants must all agree, otherwise this is an `InvalidSignature`
    /// error.
    fn sign(
        &self,
        key: &Self::SigningKey,
        digest: Self::Digest,
        context: &str,
    ) -> Result<Self::Signature, Error> {
        match (self, key, digest) {
            (Self::Ed25519(s), AnySigningKey::Ed25519(key), AnyDigest::Sha512(digest)) => {
                Ok(AnySignature::Ed25519(s.sign(key, digest, context)?))
            }
            // Unreachable while only one variant exists; kept for when more
            // schemes are added.
            _ => Err(Error::new(
                eyre!("mismatched signature algorithm"),
                ErrorKind::InvalidSignature,
            )),
        }
    }
    /// Dispatch verification to the underlying scheme; all four variants
    /// (scheme, key, digest, signature) must agree.
    fn verify(
        &self,
        key: &Self::VerifyingKey,
        digest: Self::Digest,
        context: &str,
        signature: &Self::Signature,
    ) -> Result<(), Error> {
        match (self, key, digest, signature) {
            (
                Self::Ed25519(s),
                AnyVerifyingKey::Ed25519(key),
                AnyDigest::Sha512(digest),
                AnySignature::Ed25519(signature),
            ) => s.verify(key, digest, context, signature),
            // Unreachable while only one variant exists; kept for when more
            // schemes are added.
            _ => Err(Error::new(
                eyre!("mismatched signature algorithm"),
                ErrorKind::InvalidSignature,
            )),
        }
    }
}
/// Scheme-agnostic private signing key; serialized as a PKCS#8 PEM string.
#[derive(Clone, Debug, PartialEq, Eq, TS)]
#[ts(export, type = "string")]
#[non_exhaustive]
pub enum AnySigningKey {
    Ed25519(<Ed25519 as SignatureScheme>::SigningKey),
}
impl AnySigningKey {
pub fn scheme(&self) -> AnyScheme {
match self {
Self::Ed25519(_) => AnyScheme::Ed25519(Ed25519),
}
}
pub fn verifying_key(&self) -> AnyVerifyingKey {
match self {
Self::Ed25519(k) => AnyVerifyingKey::Ed25519(k.into()),
}
}
}
impl<'a> TryFrom<PrivateKeyInfo<'a>> for AnySigningKey {
type Error = pkcs8::Error;
fn try_from(value: PrivateKeyInfo<'a>) -> Result<Self, Self::Error> {
if value.algorithm == ed25519_dalek::pkcs8::ALGORITHM_ID {
Ok(Self::Ed25519(ed25519_dalek::SigningKey::try_from(value)?))
} else {
Err(pkcs8::spki::Error::OidUnknown {
oid: value.algorithm.oid,
}
.into())
}
}
}
impl pkcs8::EncodePrivateKey for AnySigningKey {
fn to_pkcs8_der(&self) -> pkcs8::Result<pkcs8::SecretDocument> {
match self {
Self::Ed25519(s) => s.to_pkcs8_der(),
}
}
}
impl FromStr for AnySigningKey {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use pkcs8::DecodePrivateKey;
Self::from_pkcs8_pem(s).with_kind(ErrorKind::Deserialization)
}
}
impl Display for AnySigningKey {
    /// Render the key as PKCS#8 PEM with LF line endings; encoding failures
    /// surface as `fmt::Error`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use pkcs8::EncodePrivateKey;
        let pem = self
            .to_pkcs8_pem(pkcs8::LineEnding::LF)
            .map_err(|_| std::fmt::Error)?;
        f.write_str(&pem)
    }
}
// Deserialize by parsing the PEM string form (see the `FromStr` impl).
impl<'de> Deserialize<'de> for AnySigningKey {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
// Serialize via the PEM string form (see the `Display` impl).
impl Serialize for AnySigningKey {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}
/// Scheme-agnostic public verifying key; serialized as an SPKI PEM string.
#[derive(Clone, Debug, PartialEq, Eq, Hash, TS)]
#[ts(export, type = "string")]
#[non_exhaustive]
pub enum AnyVerifyingKey {
    Ed25519(<Ed25519 as SignatureScheme>::VerifyingKey),
}
impl AnyVerifyingKey {
pub fn scheme(&self) -> AnyScheme {
match self {
Self::Ed25519(_) => AnyScheme::Ed25519(Ed25519),
}
}
}
impl<'a> TryFrom<SubjectPublicKeyInfo<AnyRef<'a>, BitStringRef<'a>>> for AnyVerifyingKey {
type Error = pkcs8::spki::Error;
fn try_from(
value: SubjectPublicKeyInfo<AnyRef<'a>, BitStringRef<'a>>,
) -> Result<Self, Self::Error> {
if value.algorithm == ed25519_dalek::pkcs8::ALGORITHM_ID {
Ok(Self::Ed25519(ed25519_dalek::VerifyingKey::try_from(value)?))
} else {
Err(pkcs8::spki::Error::OidUnknown {
oid: value.algorithm.oid,
})
}
}
}
impl pkcs8::EncodePublicKey for AnyVerifyingKey {
fn to_public_key_der(&self) -> pkcs8::spki::Result<pkcs8::Document> {
match self {
Self::Ed25519(s) => s.to_public_key_der(),
}
}
}
impl FromStr for AnyVerifyingKey {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use pkcs8::DecodePublicKey;
Self::from_public_key_pem(s).with_kind(ErrorKind::Deserialization)
}
}
impl Display for AnyVerifyingKey {
    /// Render the key as SPKI PEM with LF line endings; encoding failures
    /// surface as `fmt::Error`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use pkcs8::EncodePublicKey;
        let pem = self
            .to_public_key_pem(pkcs8::LineEnding::LF)
            .map_err(|_| std::fmt::Error)?;
        f.write_str(&pem)
    }
}
// Deserialize by parsing the PEM string form (see the `FromStr` impl).
impl<'de> Deserialize<'de> for AnyVerifyingKey {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
// Serialize via the PEM string form (see the `Display` impl).
impl Serialize for AnyVerifyingKey {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}
impl ValueParserFactory for AnyVerifyingKey {
type Parser = FromStrParser<Self>;
fn value_parser() -> Self::Parser {
Self::Parser::new()
}
}
/// Scheme-agnostic message digest; variant must match the scheme in use.
#[derive(Clone, Debug)]
#[non_exhaustive]
pub enum AnyDigest {
    Sha512(Sha512),
}
impl digest::Update for AnyDigest {
fn update(&mut self, data: &[u8]) {
match self {
Self::Sha512(d) => digest::Update::update(d, data),
}
}
}
/// Scheme-agnostic signature; serialized as a PEM-armored DER structure
/// pairing the algorithm identifier with the raw signature bytes.
// NOTE(review): unlike the other `Any*` enums this one is not marked
// `#[non_exhaustive]` — confirm whether that is intentional.
#[derive(Clone, Debug, PartialEq, Eq, TS)]
#[ts(export, type = "string")]
pub enum AnySignature {
    Ed25519(<Ed25519 as SignatureScheme>::Signature),
}
impl FromStr for AnySignature {
    type Err = Error;
    /// Parse a signature from its PEM form: a DER SEQUENCE of algorithm
    /// identifier + signature octets under a "SIGNATURE" PEM label (the
    /// inverse of the `Display` impl). Unknown algorithm OIDs are rejected.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use der::DecodePem;
        // Local mirror of the on-wire structure; see the matching struct in
        // the `Display` impl.
        #[derive(der::Sequence)]
        struct AnySignatureDer {
            alg: pkcs8::spki::AlgorithmIdentifierOwned,
            sig: der::asn1::OctetString,
        }
        impl der::pem::PemLabel for AnySignatureDer {
            const PEM_LABEL: &'static str = "SIGNATURE";
        }
        let der = AnySignatureDer::from_pem(s.as_bytes()).with_kind(ErrorKind::Deserialization)?;
        // Both the OID and the (absent) parameters must match Ed25519's
        // algorithm identifier.
        if der.alg.oid == ed25519_dalek::pkcs8::ALGORITHM_ID.oid
            && der.alg.parameters.owned_to_ref() == ed25519_dalek::pkcs8::ALGORITHM_ID.parameters
        {
            Ok(Self::Ed25519(
                ed25519_dalek::Signature::from_slice(der.sig.as_bytes())
                    .with_kind(ErrorKind::Deserialization)?,
            ))
        } else {
            Err(pkcs8::spki::Error::OidUnknown { oid: der.alg.oid })
                .with_kind(ErrorKind::Deserialization)
        }
    }
}
impl Display for AnySignature {
    /// Render the signature as PEM: a DER SEQUENCE of algorithm identifier +
    /// signature octets under a "SIGNATURE" label (the inverse of the
    /// `FromStr` impl). Encoding failures surface as `fmt::Error`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use der::EncodePem;
        // Local mirror of the on-wire structure; see the matching struct in
        // the `FromStr` impl.
        #[derive(der::Sequence)]
        struct AnySignatureDer<'a> {
            alg: pkcs8::AlgorithmIdentifierRef<'a>,
            sig: der::asn1::OctetString,
        }
        impl<'a> der::pem::PemLabel for AnySignatureDer<'a> {
            const PEM_LABEL: &'static str = "SIGNATURE";
        }
        f.write_str(
            &match self {
                Self::Ed25519(s) => AnySignatureDer {
                    alg: ed25519_dalek::pkcs8::ALGORITHM_ID,
                    sig: der::asn1::OctetString::new(s.to_bytes()).map_err(|_| std::fmt::Error)?,
                },
            }
            .to_pem(der::pem::LineEnding::LF)
            .map_err(|_| std::fmt::Error)?,
        )
    }
}
// Deserialize by parsing the PEM string form (see the `FromStr` impl).
impl<'de> Deserialize<'de> for AnySignature {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserialize_from_str(deserializer)
    }
}
// Serialize via the PEM string form (see the `Display` impl).
impl Serialize for AnySignature {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serialize_display(self, serializer)
    }
}