Feature/registry package index (#2623)

* include system images in compat s9pk

* wip

* wip

* update types

* wip

* fix signature serialization

* Add SignatureHeader conversions

* finish display impl for get

---------

Co-authored-by: Shadowy Super Coder <musashidisciple@proton.me>
This commit is contained in:
Author: Aiden McClelland
Date: 2024-05-31 12:13:23 -06:00 (committed by GitHub)
Parent: 0ccbb52c1f
Commit: fd7c2fbe93
113 changed files with 3265 additions and 1436 deletions

View File

@@ -1,24 +1,35 @@
use std::path::Path;
use crate::Error;
use tokio::process::Command;
use crate::prelude::*;
use crate::util::Invoke;
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct GitHash(String);
impl GitHash {
pub async fn from_path(path: impl AsRef<Path>) -> Result<GitHash, Error> {
let hash = tokio::process::Command::new("git")
.args(["describe", "--always", "--abbrev=40", "--dirty=-modified"])
.current_dir(path)
.output()
.await?;
if !hash.status.success() {
return Err(Error::new(
color_eyre::eyre::eyre!("Could not get hash: {}", String::from_utf8(hash.stderr)?),
crate::ErrorKind::Filesystem,
));
let mut hash = String::from_utf8(
Command::new("git")
.arg("rev-parse")
.arg("HEAD")
.current_dir(&path)
.invoke(ErrorKind::Git)
.await?,
)?;
if Command::new("git")
.arg("diff-index")
.arg("--quiet")
.arg("HEAD")
.arg("--")
.invoke(ErrorKind::Git)
.await
.is_err()
{
hash += "-modified";
}
Ok(GitHash(String::from_utf8(hash.stdout)?))
Ok(GitHash(hash))
}
}

View File

@@ -12,11 +12,12 @@ use itertools::Itertools;
use tokio::io::AsyncRead;
use crate::prelude::*;
use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter};
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
use crate::s9pk::merkle_archive::write_queue::WriteQueue;
use crate::s9pk::merkle_archive::{varint, Entry, EntryContents};
use crate::util::io::ParallelBlake3Writer;
use crate::util::io::{ParallelBlake3Writer, TrackingIO};
use crate::CAP_10_MiB;
#[derive(Clone)]
pub struct DirectoryContents<S> {
@@ -151,7 +152,7 @@ impl<S: Clone> DirectoryContents<S> {
Ok(())
}
}
impl<S: ArchiveSource> DirectoryContents<Section<S>> {
impl<S: ArchiveSource + Clone> DirectoryContents<Section<S>> {
#[instrument(skip_all)]
pub fn deserialize<'a>(
source: &'a S,
@@ -181,7 +182,7 @@ impl<S: ArchiveSource> DirectoryContents<Section<S>> {
let mut entries = OrdMap::new();
for _ in 0..len {
let name = varint::deserialize_varstring(&mut toc_reader).await?;
let entry = Entry::deserialize(source, &mut toc_reader).await?;
let entry = Entry::deserialize(source.clone(), &mut toc_reader).await?;
entries.insert(name.into(), entry);
}
@@ -202,7 +203,7 @@ impl<S: ArchiveSource> DirectoryContents<Section<S>> {
.boxed()
}
}
impl<S: FileSource> DirectoryContents<S> {
impl<S: FileSource + Clone> DirectoryContents<S> {
pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> {
for k in self.keys().cloned().collect::<Vec<_>>() {
let path = Path::new(&*k);
@@ -239,8 +240,7 @@ impl<S: FileSource> DirectoryContents<S> {
#[instrument(skip_all)]
pub fn sighash<'a>(&'a self) -> BoxFuture<'a, Result<Hash, Error>> {
async move {
let mut hasher =
TrackingWriter::new(0, ParallelBlake3Writer::new(super::hash::BUFFER_CAPACITY));
let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB));
let mut sig_contents = OrdMap::new();
for (name, entry) in &**self {
sig_contents.insert(name.clone(), entry.to_missing().await?);
@@ -280,6 +280,7 @@ impl<S: FileSource> DirectoryContents<S> {
Ok(())
}
pub fn into_dyn(self) -> DirectoryContents<DynFileSource> {
DirectoryContents {
contents: self

View File

@@ -2,9 +2,10 @@ use blake3::Hash;
use tokio::io::AsyncRead;
use crate::prelude::*;
use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter};
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
use crate::util::io::ParallelBlake3Writer;
use crate::util::io::{ParallelBlake3Writer, TrackingIO};
use crate::CAP_10_MiB;
#[derive(Debug, Clone)]
pub struct FileContents<S>(S);
@@ -19,7 +20,7 @@ impl<S> FileContents<S> {
impl<S: ArchiveSource> FileContents<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
source: S,
header: &mut (impl AsyncRead + Unpin + Send),
size: u64,
) -> Result<Self, Error> {
@@ -34,8 +35,7 @@ impl<S: ArchiveSource> FileContents<Section<S>> {
}
impl<S: FileSource> FileContents<S> {
pub async fn hash(&self) -> Result<(Hash, u64), Error> {
let mut hasher =
TrackingWriter::new(0, ParallelBlake3Writer::new(super::hash::BUFFER_CAPACITY));
let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB));
self.serialize_body(&mut hasher, None).await?;
let size = hasher.position();
let hash = hasher.into_inner().finalize().await?;

View File

@@ -6,8 +6,7 @@ use tokio_util::either::Either;
use crate::prelude::*;
use crate::util::io::{ParallelBlake3Writer, TeeWriter};
pub const BUFFER_CAPACITY: usize = 10 * 1024 * 1024; // 10MiB
use crate::CAP_10_MiB;
#[pin_project::pin_project]
pub struct VerifyingWriter<W> {
@@ -21,8 +20,8 @@ impl<W: AsyncWrite> VerifyingWriter<W> {
writer: if verify.is_some() {
Either::Left(TeeWriter::new(
w,
ParallelBlake3Writer::new(BUFFER_CAPACITY),
BUFFER_CAPACITY,
ParallelBlake3Writer::new(CAP_10_MiB),
CAP_10_MiB,
))
} else {
Either::Right(w)

View File

@@ -7,11 +7,16 @@ use sha2::{Digest, Sha512};
use tokio::io::AsyncRead;
use crate::prelude::*;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::registry::signer::sign::ed25519::Ed25519;
use crate::registry::signer::sign::SignatureScheme;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::file_contents::FileContents;
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
use crate::s9pk::merkle_archive::write_queue::WriteQueue;
use crate::util::serde::Base64;
use crate::CAP_1_MiB;
pub mod directory_contents;
pub mod file_contents;
@@ -70,12 +75,13 @@ impl<S> MerkleArchive<S> {
self.contents.sort_by(sort_by)
}
}
impl<S: ArchiveSource> MerkleArchive<Section<S>> {
impl<S: ArchiveSource + Clone> MerkleArchive<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
context: &str,
header: &mut (impl AsyncRead + Unpin + Send),
commitment: Option<&MerkleArchiveCommitment>,
) -> Result<Self, Error> {
use tokio::io::AsyncReadExt;
@@ -101,6 +107,32 @@ impl<S: ArchiveSource> MerkleArchive<Section<S>> {
&signature,
)?;
if let Some(MerkleArchiveCommitment {
root_sighash,
root_maxsize,
}) = commitment
{
if sighash.as_bytes() != &**root_sighash {
return Err(Error::new(
eyre!("merkle root mismatch"),
ErrorKind::InvalidSignature,
));
}
if max_size > *root_maxsize {
return Err(Error::new(
eyre!("merkle root directory max size too large"),
ErrorKind::InvalidSignature,
));
}
} else {
if max_size > CAP_1_MiB as u64 {
return Err(Error::new(
eyre!("merkle root directory max size over 1MiB, cancelling download in case of DOS attack"),
ErrorKind::InvalidSignature,
));
}
}
let contents = DirectoryContents::deserialize(source, header, (sighash, max_size)).await?;
Ok(Self {
@@ -109,38 +141,50 @@ impl<S: ArchiveSource> MerkleArchive<Section<S>> {
})
}
}
impl<S: FileSource> MerkleArchive<S> {
impl<S: FileSource + Clone> MerkleArchive<S> {
pub async fn update_hashes(&mut self, only_missing: bool) -> Result<(), Error> {
self.contents.update_hashes(only_missing).await
}
pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> {
self.contents.filter(filter)
}
/// Build the commitment (root sighash + maximum table-of-contents size)
/// that signatures over this archive are computed against.
///
/// For an already-signed archive the max size recorded at signing time is
/// reused; for a local signer the current TOC size is used.
pub async fn commitment(&self) -> Result<MerkleArchiveCommitment, Error> {
let root_maxsize = match self.signer {
// Preserve the size committed to by the original signer.
Signer::Signed(_, _, s, _) => s,
// Freshly signed: commit to the archive's current TOC size.
_ => self.contents.toc_size(),
};
let root_sighash = self.contents.sighash().await?;
Ok(MerkleArchiveCommitment {
root_sighash: Base64(*root_sighash.as_bytes()),
root_maxsize,
})
}
/// Return the ed25519 signature over this archive's commitment.
///
/// A pre-signed archive returns its stored signature unchanged; an archive
/// holding a signing key signs the freshly computed commitment with the
/// signer's context string.
pub async fn signature(&self) -> Result<Signature, Error> {
match &self.signer {
Signer::Signed(_, s, _, _) => Ok(*s),
Signer::Signer(k, context) => {
Ed25519.sign_commitment(k, &self.commitment().await?, context)
}
}
}
#[instrument(skip_all)]
pub async fn serialize<W: Sink>(&self, w: &mut W, verify: bool) -> Result<(), Error> {
use tokio::io::AsyncWriteExt;
let sighash = self.contents.sighash().await?;
let size = self.contents.toc_size();
let commitment = self.commitment().await?;
let (pubkey, signature, max_size) = match &self.signer {
Signer::Signed(pubkey, signature, max_size, _) => (*pubkey, *signature, *max_size),
Signer::Signer(s, context) => (
s.into(),
ed25519_dalek::SigningKey::sign_prehashed(
s,
Sha512::new_with_prefix(sighash.as_bytes())
.chain_update(&u64::to_be_bytes(size)),
Some(context.as_bytes()),
)?,
size,
),
let (pubkey, signature) = match &self.signer {
Signer::Signed(pubkey, signature, _, _) => (*pubkey, *signature),
Signer::Signer(s, context) => {
(s.into(), Ed25519.sign_commitment(s, &commitment, context)?)
}
};
w.write_all(pubkey.as_bytes()).await?;
w.write_all(&signature.to_bytes()).await?;
w.write_all(sighash.as_bytes()).await?;
w.write_all(&u64::to_be_bytes(max_size)).await?;
w.write_all(&*commitment.root_sighash).await?;
w.write_all(&u64::to_be_bytes(commitment.root_maxsize))
.await?;
let mut next_pos = w.current_position().await?;
next_pos += DirectoryContents::<S>::header_size();
self.contents.serialize_header(next_pos, w).await?;
@@ -216,11 +260,10 @@ impl<S> Entry<S> {
+ self.contents.header_size()
}
}
impl<S: Clone> Entry<S> {}
impl<S: ArchiveSource> Entry<Section<S>> {
impl<S: ArchiveSource + Clone> Entry<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
source: S,
header: &mut (impl AsyncRead + Unpin + Send),
) -> Result<Self, Error> {
use tokio::io::AsyncReadExt;
@@ -241,24 +284,19 @@ impl<S: ArchiveSource> Entry<Section<S>> {
})
}
}
impl<S: FileSource> Entry<S> {
impl<S: FileSource + Clone> Entry<S> {
pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> {
if let EntryContents::Directory(d) = &mut self.contents {
d.filter(filter)?;
}
Ok(())
}
pub async fn read_file_to_vec(&self) -> Result<Vec<u8>, Error> {
match self.as_contents() {
EntryContents::File(f) => Ok(f.to_vec(self.hash).await?),
EntryContents::Directory(_) => Err(Error::new(
eyre!("expected file, found directory"),
ErrorKind::ParseS9pk,
)),
EntryContents::Missing => {
Err(Error::new(eyre!("entry is missing"), ErrorKind::ParseS9pk))
}
pub async fn update_hash(&mut self, only_missing: bool) -> Result<(), Error> {
if let EntryContents::Directory(d) = &mut self.contents {
d.update_hashes(only_missing).await?;
}
self.hash = Some(self.contents.hash().await?);
Ok(())
}
pub async fn to_missing(&self) -> Result<Self, Error> {
let hash = if let Some(hash) = self.hash {
@@ -271,13 +309,6 @@ impl<S: FileSource> Entry<S> {
contents: EntryContents::Missing,
})
}
pub async fn update_hash(&mut self, only_missing: bool) -> Result<(), Error> {
if let EntryContents::Directory(d) = &mut self.contents {
d.update_hashes(only_missing).await?;
}
self.hash = Some(self.contents.hash().await?);
Ok(())
}
#[instrument(skip_all)]
pub async fn serialize_header<W: Sink>(
&self,
@@ -302,6 +333,20 @@ impl<S: FileSource> Entry<S> {
}
}
}
impl<S: FileSource> Entry<S> {
pub async fn read_file_to_vec(&self) -> Result<Vec<u8>, Error> {
match self.as_contents() {
EntryContents::File(f) => Ok(f.to_vec(self.hash).await?),
EntryContents::Directory(_) => Err(Error::new(
eyre!("expected file, found directory"),
ErrorKind::ParseS9pk,
)),
EntryContents::Missing => {
Err(Error::new(eyre!("entry is missing"), ErrorKind::ParseS9pk))
}
}
}
}
#[derive(Debug, Clone)]
pub enum EntryContents<S> {
@@ -329,10 +374,10 @@ impl<S> EntryContents<S> {
matches!(self, &EntryContents::Directory(_))
}
}
impl<S: ArchiveSource> EntryContents<Section<S>> {
impl<S: ArchiveSource + Clone> EntryContents<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
source: S,
header: &mut (impl AsyncRead + Unpin + Send),
(hash, size): (Hash, u64),
) -> Result<Self, Error> {
@@ -346,7 +391,7 @@ impl<S: ArchiveSource> EntryContents<Section<S>> {
FileContents::deserialize(source, header, size).await?,
)),
2 => Ok(Self::Directory(
DirectoryContents::deserialize(source, header, (hash, size)).await?,
DirectoryContents::deserialize(&source, header, (hash, size)).await?,
)),
id => Err(Error::new(
eyre!("Unknown type id {id} found in MerkleArchive"),
@@ -355,7 +400,7 @@ impl<S: ArchiveSource> EntryContents<Section<S>> {
}
}
}
impl<S: FileSource> EntryContents<S> {
impl<S: FileSource + Clone> EntryContents<S> {
pub async fn hash(&self) -> Result<(Hash, u64), Error> {
match self {
Self::Missing => Err(Error::new(
@@ -366,6 +411,15 @@ impl<S: FileSource> EntryContents<S> {
Self::Directory(d) => Ok((d.sighash().await?, d.toc_size())),
}
}
pub fn into_dyn(self) -> EntryContents<DynFileSource> {
match self {
Self::Missing => EntryContents::Missing,
Self::File(f) => EntryContents::File(f.into_dyn()),
Self::Directory(d) => EntryContents::Directory(d.into_dyn()),
}
}
}
impl<S: FileSource> EntryContents<S> {
#[instrument(skip_all)]
pub async fn serialize_header<W: Sink>(
&self,
@@ -381,11 +435,4 @@ impl<S: FileSource> EntryContents<S> {
Self::Directory(d) => Some(d.serialize_header(position, w).await?),
})
}
pub fn into_dyn(self) -> EntryContents<DynFileSource> {
match self {
Self::Missing => EntryContents::Missing,
Self::File(f) => EntryContents::File(f.into_dyn()),
Self::Directory(d) => EntryContents::Directory(d.into_dyn()),
}
}
}

View File

@@ -1,6 +1,7 @@
use tokio::io::{AsyncSeek, AsyncWrite};
use crate::prelude::*;
use crate::util::io::TrackingIO;
#[async_trait::async_trait]
pub trait Sink: AsyncWrite + Unpin + Send {
@@ -17,57 +18,8 @@ impl<S: AsyncWrite + AsyncSeek + Unpin + Send> Sink for S {
}
#[async_trait::async_trait]
impl<W: AsyncWrite + Unpin + Send> Sink for TrackingWriter<W> {
impl<W: AsyncWrite + Unpin + Send> Sink for TrackingIO<W> {
async fn current_position(&mut self) -> Result<u64, Error> {
Ok(self.position)
}
}
/// Writer adapter that counts how many bytes have passed through it.
#[pin_project::pin_project]
pub struct TrackingWriter<W> {
    position: u64,
    #[pin]
    writer: W,
}

impl<W> TrackingWriter<W> {
    /// Wrap `w`, counting written bytes starting from offset `start`.
    pub fn new(start: u64, w: W) -> Self {
        TrackingWriter {
            writer: w,
            position: start,
        }
    }

    /// Consume the adapter, returning the wrapped writer.
    pub fn into_inner(self) -> W {
        self.writer
    }

    /// Current offset: the starting offset plus all bytes written so far.
    pub fn position(&self) -> u64 {
        self.position
    }
}
// Forwarding AsyncWrite impl that advances `position` by the number of
// bytes the inner writer actually accepted on each successful write.
// (Repaired: the tail of this impl was corrupted by an interleaved line
// from the `Sink for TrackingIO` impl.)
impl<W: AsyncWrite + Unpin + Send> AsyncWrite for TrackingWriter<W> {
    fn poll_write(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &[u8],
    ) -> std::task::Poll<Result<usize, std::io::Error>> {
        let this = self.project();
        match this.writer.poll_write(cx, buf) {
            std::task::Poll::Ready(Ok(written)) => {
                // Only count bytes the inner writer reported as written,
                // which may be fewer than `buf.len()`.
                *this.position += written as u64;
                std::task::Poll::Ready(Ok(written))
            }
            // Pending or error: position is unchanged.
            a => a,
        }
    }
    fn poll_flush(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Result<(), std::io::Error>> {
        self.project().writer.poll_flush(cx)
    }
    fn poll_shutdown(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Result<(), std::io::Error>> {
        self.project().writer.poll_shutdown(cx)
    }
}

View File

@@ -1,23 +1,25 @@
use std::collections::BTreeSet;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::task::Poll;
use bytes::Bytes;
use futures::stream::BoxStream;
use futures::{StreamExt, TryStreamExt};
use futures::{Stream, StreamExt, TryStreamExt};
use reqwest::header::{ACCEPT_RANGES, CONTENT_LENGTH, RANGE};
use reqwest::{Client, Url};
use tokio::io::{AsyncRead, AsyncReadExt, Take};
use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take};
use tokio_util::io::StreamReader;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::TrackingIO;
use crate::util::Apply;
#[derive(Clone)]
pub struct HttpSource {
url: Url,
client: Client,
size: Option<u64>,
range_support: Result<
(),
(), // Arc<Mutex<Option<RangelessReader>>>
>,
range_support: Result<(), Arc<Mutex<BTreeSet<TrackingIO<HttpBodyReader>>>>>,
}
impl HttpSource {
pub async fn new(client: Client, url: Url) -> Result<Self, Error> {
@@ -32,7 +34,8 @@ impl HttpSource {
.headers()
.get(ACCEPT_RANGES)
.and_then(|s| s.to_str().ok())
== Some("bytes");
== Some("bytes")
&& false;
let size = head
.headers()
.get(CONTENT_LENGTH)
@@ -45,53 +48,141 @@ impl HttpSource {
range_support: if range_support {
Ok(())
} else {
Err(()) // Err(Arc::new(Mutex::new(None)))
Err(Arc::new(Mutex::new(BTreeSet::new())))
},
})
}
}
impl ArchiveSource for HttpSource {
type Reader = Take<HttpReader>;
type Reader = HttpReader;
async fn size(&self) -> Option<u64> {
self.size
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
Ok(StreamReader::new(
self.client
.get(self.url.clone())
.send()
.await
.with_kind(ErrorKind::Network)?
.error_for_status()
.with_kind(ErrorKind::Network)?
.bytes_stream()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
.apply(boxed),
))
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
match self.range_support {
Ok(_) => Ok(HttpReader::Range(StreamReader::new(if size > 0 {
self.client
.get(self.url.clone())
.header(RANGE, format!("bytes={}-{}", position, position + size - 1))
.send()
.await
.with_kind(ErrorKind::Network)?
.error_for_status()
.with_kind(ErrorKind::Network)?
.bytes_stream()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
.boxed()
} else {
futures::stream::empty().boxed()
}))
.take(size)),
_ => todo!(),
match &self.range_support {
Ok(_) => Ok(HttpReader::Range(
StreamReader::new(if size > 0 {
self.client
.get(self.url.clone())
.header(RANGE, format!("bytes={}-{}", position, position + size - 1))
.send()
.await
.with_kind(ErrorKind::Network)?
.error_for_status()
.with_kind(ErrorKind::Network)?
.bytes_stream()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
.apply(boxed)
} else {
futures::stream::empty().apply(boxed)
})
.take(size),
)),
Err(pool) => {
fn get_reader_for(
pool: &Arc<Mutex<BTreeSet<TrackingIO<HttpBodyReader>>>>,
position: u64,
) -> Option<TrackingIO<HttpBodyReader>> {
let mut lock = pool.lock().unwrap();
let pos = lock.range(..position).last()?.position();
lock.take(&pos)
}
let reader = get_reader_for(pool, position);
let mut reader = if let Some(reader) = reader {
reader
} else {
TrackingIO::new(
0,
StreamReader::new(
self.client
.get(self.url.clone())
.send()
.await
.with_kind(ErrorKind::Network)?
.error_for_status()
.with_kind(ErrorKind::Network)?
.bytes_stream()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
.apply(boxed),
),
)
};
if reader.position() < position {
let to_skip = position - reader.position();
tokio::io::copy(&mut (&mut reader).take(to_skip), &mut tokio::io::sink())
.await?;
}
Ok(HttpReader::Rangeless {
pool: pool.clone(),
reader: Some(reader.take(size)),
})
}
}
}
}
#[pin_project::pin_project(project = HttpReaderProj)]
// Local boxed-stream alias: unlike `futures::stream::BoxStream`, this one
// additionally requires `Sync` on the boxed stream.
type BoxStream<'a, T> = Pin<Box<dyn Stream<Item = T> + Send + Sync + 'a>>;
// Pin-box a stream into the local `BoxStream` alias.
fn boxed<'a, T>(stream: impl Stream<Item = T> + Send + Sync + 'a) -> BoxStream<'a, T> {
Box::pin(stream)
}
// Byte reader over a boxed HTTP response-body stream.
type HttpBodyReader = StreamReader<BoxStream<'static, Result<Bytes, std::io::Error>>, Bytes>;
#[pin_project::pin_project(project = HttpReaderProj, PinnedDrop)]
pub enum HttpReader {
Range(#[pin] StreamReader<BoxStream<'static, Result<Bytes, std::io::Error>>, Bytes>),
// Rangeless(#[pin] RangelessReader),
Range(#[pin] Take<HttpBodyReader>),
Rangeless {
pool: Arc<Mutex<BTreeSet<TrackingIO<HttpBodyReader>>>>,
#[pin]
reader: Option<Take<TrackingIO<HttpBodyReader>>>,
},
}
impl AsyncRead for HttpReader {
fn poll_read(
self: std::pin::Pin<&mut Self>,
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &mut tokio::io::ReadBuf<'_>,
) -> std::task::Poll<std::io::Result<()>> {
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
match self.project() {
HttpReaderProj::Range(r) => r.poll_read(cx, buf),
// HttpReaderProj::Rangeless(r) => r.poll_read(cx, buf),
HttpReaderProj::Rangeless { mut reader, .. } => {
let mut finished = false;
if let Some(reader) = reader.as_mut().as_pin_mut() {
let start = buf.filled().len();
futures::ready!(reader.poll_read(cx, buf)?);
finished = start == buf.filled().len();
}
if finished {
reader.take();
}
Poll::Ready(Ok(()))
}
}
}
}
#[pin_project::pinned_drop]
impl PinnedDrop for HttpReader {
fn drop(self: Pin<&mut Self>) {
match self.project() {
HttpReaderProj::Range(_) => (),
HttpReaderProj::Rangeless { pool, mut reader } => {
if let Some(reader) = reader.take() {
pool.lock().unwrap().insert(reader.into_inner());
}
}
}
}
}

View File

@@ -1,3 +1,4 @@
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
@@ -13,7 +14,7 @@ use crate::s9pk::merkle_archive::hash::VerifyingWriter;
pub mod http;
pub mod multi_cursor_file;
pub trait FileSource: Clone + Send + Sync + Sized + 'static {
pub trait FileSource: Send + Sync + Sized + 'static {
type Reader: AsyncRead + Unpin + Send;
fn size(&self) -> impl Future<Output = Result<u64, Error>> + Send;
fn reader(&self) -> impl Future<Output = Result<Self::Reader, Error>> + Send;
@@ -61,6 +62,29 @@ pub trait FileSource: Clone + Send + Sync + Sized + 'static {
}
}
impl<T: FileSource> FileSource for Arc<T> {
type Reader = T::Reader;
async fn size(&self) -> Result<u64, Error> {
self.deref().size().await
}
async fn reader(&self) -> Result<Self::Reader, Error> {
self.deref().reader().await
}
async fn copy<W: AsyncWrite + Unpin + Send + ?Sized>(&self, w: &mut W) -> Result<(), Error> {
self.deref().copy(w).await
}
async fn copy_verify<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
verify: Option<(Hash, u64)>,
) -> Result<(), Error> {
self.deref().copy_verify(w, verify).await
}
async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result<Vec<u8>, Error> {
self.deref().to_vec(verify).await
}
}
#[derive(Clone)]
pub struct DynFileSource(Arc<dyn DynableFileSource>);
impl DynFileSource {
@@ -155,16 +179,28 @@ impl FileSource for Arc<[u8]> {
}
}
pub trait ArchiveSource: Clone + Send + Sync + Sized + 'static {
pub trait ArchiveSource: Send + Sync + Sized + 'static {
type Reader: AsyncRead + Unpin + Send;
fn size(&self) -> impl Future<Output = Option<u64>> + Send {
async { None }
}
fn fetch_all(
&self,
) -> impl Future<Output = Result<impl AsyncRead + Unpin + Send, Error>> + Send;
fn fetch(
&self,
position: u64,
size: u64,
) -> impl Future<Output = Result<Self::Reader, Error>> + Send;
fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
) -> impl Future<Output = Result<(), Error>> + Send {
async move {
tokio::io::copy(&mut self.fetch_all().await?, w).await?;
Ok(())
}
}
fn copy_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
position: u64,
@@ -176,17 +212,47 @@ pub trait ArchiveSource: Clone + Send + Sync + Sized + 'static {
Ok(())
}
}
fn section(&self, position: u64, size: u64) -> Section<Self> {
fn section(self, position: u64, size: u64) -> Section<Self> {
Section {
source: self.clone(),
source: self,
position,
size,
}
}
}
impl<T: ArchiveSource> ArchiveSource for Arc<T> {
type Reader = T::Reader;
async fn size(&self) -> Option<u64> {
self.deref().size().await
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
self.deref().fetch_all().await
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
self.deref().fetch(position, size).await
}
async fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
) -> Result<(), Error> {
self.deref().copy_all_to(w).await
}
async fn copy_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
position: u64,
size: u64,
w: &mut W,
) -> Result<(), Error> {
self.deref().copy_to(position, size, w).await
}
}
impl ArchiveSource for Arc<[u8]> {
type Reader = tokio::io::Take<std::io::Cursor<Self>>;
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
Ok(std::io::Cursor::new(self.clone()))
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
use tokio::io::AsyncReadExt;

View File

@@ -1,18 +1,35 @@
use std::io::SeekFrom;
use std::os::fd::{AsRawFd, RawFd};
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::Arc;
use std::task::Poll;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncReadExt};
use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take};
use tokio::sync::{Mutex, OwnedMutexGuard};
use crate::disk::mount::filesystem::loop_dev::LoopDev;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::{ArchiveSource, Section};
fn path_from_fd(fd: RawFd) -> PathBuf {
Path::new("/proc/self/fd").join(fd.to_string())
fn path_from_fd(fd: RawFd) -> Result<PathBuf, Error> {
#[cfg(target_os = "linux")]
let path = Path::new("/proc/self/fd").join(fd.to_string());
#[cfg(target_os = "macos")] // here be dragons
let path = unsafe {
let mut buf = [0u8; libc::PATH_MAX as usize];
if libc::fcntl(fd, libc::F_GETPATH, buf.as_mut_ptr().cast::<libc::c_char>()) == -1 {
return Err(std::io::Error::last_os_error().into());
}
Path::new(
&*std::ffi::CStr::from_bytes_until_nul(&buf)
.with_kind(ErrorKind::Utf8)?
.to_string_lossy(),
)
.to_owned()
};
Ok(path)
}
#[derive(Clone)]
@@ -21,18 +38,26 @@ pub struct MultiCursorFile {
file: Arc<Mutex<File>>,
}
impl MultiCursorFile {
fn path(&self) -> PathBuf {
fn path(&self) -> Result<PathBuf, Error> {
path_from_fd(self.fd)
}
pub async fn open(fd: &impl AsRawFd) -> Result<Self, Error> {
let fd = fd.as_raw_fd();
Ok(Self {
fd,
file: Arc::new(Mutex::new(File::open(path_from_fd(fd)).await?)),
})
let f = File::open(path_from_fd(fd.as_raw_fd())?).await?;
Ok(Self::from(f))
}
pub async fn cursor(&self) -> Result<FileCursor, Error> {
Ok(FileCursor(
if let Ok(file) = self.file.clone().try_lock_owned() {
file
} else {
Arc::new(Mutex::new(File::open(self.path()?).await?))
.try_lock_owned()
.expect("freshly created")
},
))
}
pub async fn blake3_mmap(&self) -> Result<blake3::Hash, Error> {
let path = self.path();
let path = self.path()?;
tokio::task::spawn_blocking(move || {
let mut hasher = blake3::Hasher::new();
hasher.update_mmap_rayon(path)?;
@@ -52,76 +77,44 @@ impl From<File> for MultiCursorFile {
}
#[pin_project::pin_project]
pub struct FileSectionReader {
#[pin]
file: OwnedMutexGuard<File>,
remaining: u64,
}
impl AsyncRead for FileSectionReader {
pub struct FileCursor(#[pin] OwnedMutexGuard<File>);
impl AsyncRead for FileCursor {
fn poll_read(
self: std::pin::Pin<&mut Self>,
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &mut tokio::io::ReadBuf<'_>,
) -> std::task::Poll<std::io::Result<()>> {
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
let this = self.project();
if *this.remaining == 0 {
return std::task::Poll::Ready(Ok(()));
}
let before = buf.filled().len() as u64;
let res = std::pin::Pin::new(&mut (&mut **this.file.get_mut()).take(*this.remaining))
.poll_read(cx, buf);
*this.remaining = this
.remaining
.saturating_sub(buf.filled().len() as u64 - before);
res
Pin::new(&mut (&mut **this.0.get_mut())).poll_read(cx, buf)
}
}
impl ArchiveSource for MultiCursorFile {
type Reader = FileSectionReader;
type Reader = Take<FileCursor>;
async fn size(&self) -> Option<u64> {
tokio::fs::metadata(self.path()).await.ok().map(|m| m.len())
tokio::fs::metadata(self.path().ok()?)
.await
.ok()
.map(|m| m.len())
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
use tokio::io::AsyncSeekExt;
let mut file = self.cursor().await?;
file.0.seek(SeekFrom::Start(0)).await?;
Ok(file)
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
use tokio::io::AsyncSeekExt;
let mut file = if let Ok(file) = self.file.clone().try_lock_owned() {
file
} else {
#[cfg(target_os = "linux")]
let file = File::open(self.path()).await?;
#[cfg(target_os = "macos")] // here be dragons
let file = unsafe {
let mut buf = [0u8; libc::PATH_MAX as usize];
if libc::fcntl(
self.fd,
libc::F_GETPATH,
buf.as_mut_ptr().cast::<libc::c_char>(),
) == -1
{
return Err(std::io::Error::last_os_error().into());
}
File::open(
&*std::ffi::CStr::from_bytes_until_nul(&buf)
.with_kind(ErrorKind::Utf8)?
.to_string_lossy(),
)
.await?
};
Arc::new(Mutex::new(file))
.try_lock_owned()
.expect("freshly created")
};
file.seek(SeekFrom::Start(position)).await?;
Ok(Self::Reader {
file,
remaining: size,
})
let mut file = self.cursor().await?;
file.0.seek(SeekFrom::Start(position)).await?;
Ok(file.take(size))
}
}
impl From<&Section<MultiCursorFile>> for LoopDev<PathBuf> {
fn from(value: &Section<MultiCursorFile>) -> Self {
LoopDev::new(value.source.path(), value.position, value.size)
LoopDev::new(value.source.path().unwrap(), value.position, value.size)
}
}

View File

@@ -8,9 +8,9 @@ use ed25519_dalek::SigningKey;
use crate::prelude::*;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::file_contents::FileContents;
use crate::s9pk::merkle_archive::sink::TrackingWriter;
use crate::s9pk::merkle_archive::source::FileSource;
use crate::s9pk::merkle_archive::{Entry, EntryContents, MerkleArchive};
use crate::util::io::TrackingIO;
/// Creates a MerkleArchive (a1) with the provided files at the provided paths. NOTE: later files can overwrite previous files/directories at the same path
/// Tests:
@@ -60,10 +60,15 @@ fn test(files: Vec<(PathBuf, String)>) -> Result<(), Error> {
.block_on(async move {
a1.update_hashes(true).await?;
let mut s1 = Vec::new();
a1.serialize(&mut TrackingWriter::new(0, &mut s1), true)
.await?;
a1.serialize(&mut TrackingIO::new(0, &mut s1), true).await?;
let s1: Arc<[u8]> = s1.into();
let a2 = MerkleArchive::deserialize(&s1, "test", &mut Cursor::new(s1.clone())).await?;
let a2 = MerkleArchive::deserialize(
&s1,
"test",
&mut Cursor::new(s1.clone()),
Some(&a1.commitment().await?),
)
.await?;
for (path, content) in check_set {
match a2
@@ -88,8 +93,7 @@ fn test(files: Vec<(PathBuf, String)>) -> Result<(), Error> {
}
let mut s2 = Vec::new();
a2.serialize(&mut TrackingWriter::new(0, &mut s2), true)
.await?;
a2.serialize(&mut TrackingIO::new(0, &mut s2), true).await?;
let s2: Arc<[u8]> = s2.into();
ensure_code!(s1 == s2, ErrorKind::Pack, "s1 does not match s2");

View File

@@ -30,6 +30,8 @@ impl<'a, S: FileSource> WriteQueue<'a, S> {
self.queue.push_back(entry);
Ok(res)
}
}
impl<'a, S: FileSource + Clone> WriteQueue<'a, S> {
pub async fn serialize<W: Sink>(&mut self, w: &mut W, verify: bool) -> Result<(), Error> {
loop {
let Some(next) = self.queue.pop_front() else {

View File

@@ -1,3 +1,4 @@
pub mod git_hash;
pub mod merkle_archive;
pub mod rpc;
pub mod v1;

View File

@@ -172,14 +172,14 @@ async fn add_image(
.join(&arch)
.join(&id)
.with_extension("env"),
Entry::file(DynFileSource::new(Arc::from(Vec::from(env)))),
Entry::file(DynFileSource::new(Arc::<[u8]>::from(Vec::from(env)))),
)?;
archive.contents_mut().insert_path(
Path::new("images")
.join(&arch)
.join(&id)
.with_extension("json"),
Entry::file(DynFileSource::new(Arc::from(
Entry::file(DynFileSource::new(Arc::<[u8]>::from(
serde_json::to_vec(&serde_json::json!({
"workdir": workdir
}))

View File

@@ -3,35 +3,38 @@ use std::path::{Path, PathBuf};
use emver::VersionRange;
use imbl_value::InOMap;
use indexmap::IndexMap;
pub use models::PackageId;
use models::VolumeId;
use models::{ActionId, HealthCheckId, ImageId, VolumeId};
use serde::{Deserialize, Serialize};
use url::Url;
use super::git_hash::GitHash;
use crate::prelude::*;
use crate::s9pk::git_hash::GitHash;
use crate::s9pk::manifest::{Alerts, Description, HardwareRequirements};
use crate::util::Version;
use crate::util::serde::{Duration, IoFormat};
use crate::util::VersionString;
use crate::version::{Current, VersionT};
fn current_version() -> Version {
fn current_version() -> VersionString {
Current::new().semver().into()
}
#[derive(Clone, Debug, Deserialize, Serialize, HasModel)]
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
#[model = "Model<Self>"]
pub struct Manifest {
#[serde(default = "current_version")]
pub eos_version: Version,
pub eos_version: VersionString,
pub id: PackageId,
#[serde(default)]
pub git_hash: Option<GitHash>,
pub title: String,
pub version: VersionString,
pub description: Description,
#[serde(default)]
pub assets: Assets,
pub title: String,
pub version: Version,
pub description: Description,
#[serde(default)]
pub build: Option<Vec<String>>,
pub release_notes: String,
pub license: String, // type of license
pub wrapper_repo: Url,
@@ -41,10 +44,23 @@ pub struct Manifest {
pub donation_url: Option<Url>,
#[serde(default)]
pub alerts: Alerts,
pub main: PackageProcedure,
pub health_checks: HealthChecks,
pub config: Option<ConfigActions>,
pub properties: Option<PackageProcedure>,
pub volumes: BTreeMap<VolumeId, Value>,
// #[serde(default)]
// pub interfaces: Interfaces,
// #[serde(default)]
pub backup: BackupActions,
#[serde(default)]
pub migrations: Migrations,
#[serde(default)]
pub actions: BTreeMap<ActionId, Action>,
// #[serde(default)]
// pub permissions: Permissions,
#[serde(default)]
pub dependencies: BTreeMap<PackageId, DepInfo>,
pub config: Option<InOMap<String, Value>>,
#[serde(default)]
pub replaces: Vec<String>,
@@ -53,6 +69,123 @@ pub struct Manifest {
pub hardware_requirements: HardwareRequirements,
}
impl Manifest {
    /// Iterates over every [`PackageProcedure`] referenced by this (v1)
    /// manifest: `main`, config `get`/`set`, `properties`, backup
    /// `create`/`restore`, all `to`/`from` migrations, and each action's
    /// implementation.
    ///
    /// Used during s9pk v1 -> v2 conversion to discover the set of docker
    /// images a package needs.
    pub fn package_procedures(&self) -> impl Iterator<Item = &PackageProcedure> {
        use std::iter::once;

        let main = once(&self.main);
        // `Option::map(..).into_iter()` yields 0 or 1 items depending on
        // whether `config` is present.
        let cfg_get = self.config.as_ref().map(|a| &a.get).into_iter();
        let cfg_set = self.config.as_ref().map(|a| &a.set).into_iter();
        let props = self.properties.iter();
        // once/chain instead of `vec![..].into_iter()`: same two items,
        // no heap allocation (clippy::useless_vec).
        let backups = once(&self.backup.create).chain(once(&self.backup.restore));
        let migrations = self
            .migrations
            .to
            .values()
            .chain(self.migrations.from.values());
        let actions = self.actions.values().map(|a| &a.implementation);
        main.chain(cfg_get)
            .chain(cfg_set)
            .chain(props)
            .chain(backups)
            .chain(migrations)
            .chain(actions)
    }
}
/// A runnable procedure declared in a v1 (legacy) package manifest.
///
/// Serialized as an internally-tagged enum: the `type` field selects the
/// variant, with kebab-case names (`"docker"` / `"script"`).
#[derive(Clone, Debug, Deserialize, Serialize, HasModel)]
#[serde(rename_all = "kebab-case")]
#[serde(tag = "type")]
#[model = "Model<Self>"]
pub enum PackageProcedure {
    /// Runs in a docker container described by [`DockerProcedure`].
    Docker(DockerProcedure),
    /// Embedded script procedure; payload kept as an untyped `Value`
    /// (not interpreted by this module).
    Script(Value),
}
/// How to launch a v1 procedure inside a docker container.
///
/// All `#[serde(default)]` fields are optional in the manifest and fall back
/// to their `Default` values when omitted.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct DockerProcedure {
    /// Image the container is created from.
    pub image: ImageId,
    /// Whether `image` is a shared system image rather than one packaged
    /// with this service (defaults to false). NOTE(review): affects image
    /// naming during v1->v2 conversion — confirm exact semantics upstream.
    #[serde(default)]
    pub system: bool,
    /// Container entrypoint executable.
    pub entrypoint: String,
    /// Arguments passed to the entrypoint (defaults to none).
    #[serde(default)]
    pub args: Vec<String>,
    /// NOTE(review): presumably runs the procedure inside the main service
    /// container instead of a fresh one — confirm against the executor.
    #[serde(default)]
    pub inject: bool,
    /// Volume-id -> mount-point mapping inside the container.
    #[serde(default)]
    pub mounts: BTreeMap<VolumeId, PathBuf>,
    /// Serialization format used for the procedure's I/O, if any.
    #[serde(default)]
    pub io_format: Option<IoFormat>,
    /// How long to wait after SIGTERM before force-killing, if specified.
    #[serde(default)]
    pub sigterm_timeout: Option<Duration>,
    /// Shared-memory size for the container, in megabytes.
    #[serde(default)]
    pub shm_size_mb: Option<usize>, // TODO: use postfix sizing? like 1k vs 1m vs 1g
    /// Whether the container requests GPU access (defaults to false).
    #[serde(default)]
    pub gpu_acceleration: bool,
}
/// Newtype over the map of health-check id -> [`HealthCheck`] declared by a
/// v1 manifest. Serializes transparently as the inner map.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct HealthChecks(pub BTreeMap<HealthCheckId, HealthCheck>);
/// A single health check from a v1 manifest.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct HealthCheck {
    /// Human-readable name of the check.
    pub name: String,
    /// Message shown when the check passes, if provided.
    pub success_message: Option<String>,
    // Flattened: the procedure's fields (including its `type` tag) live at
    // the same level as `name`/`success-message` in the serialized form.
    #[serde(flatten)]
    implementation: PackageProcedure,
    /// Maximum time the check may run before being considered failed.
    pub timeout: Option<Duration>,
}
/// The pair of procedures a v1 package uses to read and write its config.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ConfigActions {
    /// Procedure that fetches the current configuration.
    pub get: PackageProcedure,
    /// Procedure that applies a new configuration.
    pub set: PackageProcedure,
}
/// The pair of procedures a v1 package uses to back up and restore its data.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct BackupActions {
    /// Procedure that creates a backup.
    pub create: PackageProcedure,
    /// Procedure that restores from a backup.
    pub restore: PackageProcedure,
}
/// Version-migration procedures declared by a v1 manifest, keyed by the
/// version range they apply to. `IndexMap` preserves the manifest's declared
/// order. Defaults to empty maps when omitted.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Migrations {
    /// Migrations run when upgrading *from* a version in the given range.
    pub from: IndexMap<VersionRange, PackageProcedure>,
    /// Migrations run when downgrading *to* a version in the given range.
    /// NOTE(review): exact from/to semantics inferred from field names —
    /// confirm against the migration runner.
    pub to: IndexMap<VersionRange, PackageProcedure>,
}
/// A user-invokable action declared by a v1 manifest.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Action {
    /// Display name of the action.
    pub name: String,
    /// Description shown to the user.
    pub description: String,
    /// Optional warning displayed before the action is run.
    #[serde(default)]
    pub warning: Option<String>,
    /// Procedure executed when the action is invoked.
    pub implementation: PackageProcedure,
    // pub allowed_statuses: Vec<DockerStatus>,
    // #[serde(default)]
    // pub input_spec: ConfigSpec,
}
/// Information about one dependency of a v1 package.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct DepInfo {
    /// Range of dependency versions this package accepts.
    pub version: VersionRange,
    /// How strongly the dependency is required.
    pub requirement: DependencyRequirement,
    /// Optional human-readable explanation of the dependency.
    pub description: Option<String>,
    /// Procedures for checking/auto-configuring the dependency's config,
    /// if declared.
    #[serde(default)]
    pub config: Option<DependencyConfig>,
}
/// Procedures a package provides to manage a dependency's configuration.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct DependencyConfig {
    /// Checks whether the dependency's config satisfies this package.
    check: PackageProcedure,
    /// Rewrites the dependency's config to satisfy this package.
    auto_configure: PackageProcedure,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
#[serde(tag = "type")]
@@ -67,15 +200,6 @@ impl DependencyRequirement {
}
}
/// Information about one dependency of a package.
#[derive(Clone, Debug, Deserialize, Serialize, HasModel)]
#[serde(rename_all = "kebab-case")]
#[model = "Model<Self>"]
pub struct DepInfo {
    /// Range of dependency versions this package accepts.
    pub version: VersionRange,
    /// How strongly the dependency is required.
    pub requirement: DependencyRequirement,
    /// Optional human-readable explanation of the dependency.
    pub description: Option<String>,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Assets {

View File

@@ -6,7 +6,6 @@ use ts_rs::TS;
pub mod builder;
pub mod docker;
pub mod git_hash;
pub mod header;
pub mod manifest;
pub mod reader;

View File

@@ -20,7 +20,7 @@ use super::header::{FileSection, Header, TableOfContents};
use super::SIG_CONTEXT;
use crate::prelude::*;
use crate::s9pk::v1::docker::DockerReader;
use crate::util::Version;
use crate::util::VersionString;
#[pin_project::pin_project]
#[derive(Debug)]
@@ -83,11 +83,11 @@ impl<'a, R: AsyncSeek + Unpin> AsyncSeek for ReadHandle<'a, R> {
pub struct ImageTag {
pub package_id: PackageId,
pub image_id: ImageId,
pub version: Version,
pub version: VersionString,
}
impl ImageTag {
#[instrument(skip_all)]
pub fn validate(&self, id: &PackageId, version: &Version) -> Result<(), Error> {
pub fn validate(&self, id: &PackageId, version: &VersionString) -> Result<(), Error> {
if id != &self.package_id {
return Err(Error::new(
eyre!(

View File

@@ -1,9 +1,10 @@
use std::collections::BTreeSet;
use std::collections::{BTreeMap, BTreeSet};
use std::io::Cursor;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use itertools::Itertools;
use models::ImageId;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncSeek, AsyncWriteExt};
use tokio::process::Command;
@@ -16,7 +17,7 @@ use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{FileSource, Section};
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::rpc::SKIP_ENV;
use crate::s9pk::v1::manifest::Manifest as ManifestV1;
use crate::s9pk::v1::manifest::{Manifest as ManifestV1, PackageProcedure};
use crate::s9pk::v1::reader::S9pkReader;
use crate::s9pk::v2::{S9pk, SIG_CONTEXT};
use crate::util::io::TmpDir;
@@ -72,6 +73,17 @@ impl S9pk<Section<MultiCursorFile>> {
let manifest = from_value::<ManifestV1>(manifest_raw.clone())?;
let mut new_manifest = Manifest::from(manifest.clone());
let images: BTreeMap<ImageId, bool> = manifest
.package_procedures()
.filter_map(|p| {
if let PackageProcedure::Docker(p) = p {
Some((p.image.clone(), p.system))
} else {
None
}
})
.collect();
// LICENSE.md
let license: Arc<[u8]> = reader.license().await?.to_vec().await?.into();
archive.insert_path(
@@ -109,61 +121,14 @@ impl S9pk<Section<MultiCursorFile>> {
.input(Some(&mut reader.docker_images(&arch).await?))
.invoke(ErrorKind::Docker)
.await?;
/// One record of the JSON emitted by `<container-tool> images --format=json`.
#[derive(serde::Deserialize)]
#[serde(rename_all = "PascalCase")]
struct DockerImagesOut {
    /// Image repository — populated by docker-style output; may be absent.
    repository: Option<String>,
    /// Image tag — populated by docker-style output; may be absent.
    tag: Option<String>,
    /// Fully-qualified image names (e.g. `docker.io/...`) — populated by
    /// podman-style output; empty when absent.
    #[serde(default)]
    names: Vec<String>,
}
for image in {
#[cfg(feature = "docker")]
let images = std::str::from_utf8(
&Command::new(CONTAINER_TOOL)
.arg("images")
.arg("--format=json")
.invoke(ErrorKind::Docker)
.await?,
)?
.lines()
.map(|l| serde_json::from_str::<DockerImagesOut>(l))
.collect::<Result<Vec<_>, _>>()
.with_kind(ErrorKind::Deserialization)?
.into_iter();
#[cfg(not(feature = "docker"))]
let images = serde_json::from_slice::<Vec<DockerImagesOut>>(
&Command::new(CONTAINER_TOOL)
.arg("images")
.arg("--format=json")
.invoke(ErrorKind::Docker)
.await?,
)
.with_kind(ErrorKind::Deserialization)?
.into_iter();
images
}
.flat_map(|i| {
if let (Some(repository), Some(tag)) = (i.repository, i.tag) {
vec![format!("{repository}:{tag}")]
for (image, system) in &images {
new_manifest.images.insert(image.clone());
let sqfs_path = images_dir.join(image).with_extension("squashfs");
let image_name = if *system {
format!("start9/{}:latest", image)
} else {
i.names
.into_iter()
.filter_map(|i| i.strip_prefix("docker.io/").map(|s| s.to_owned()))
.collect()
}
})
.filter_map(|i| {
i.strip_suffix(&format!(":{}", manifest.version))
.map(|s| s.to_owned())
})
.filter_map(|i| {
i.strip_prefix(&format!("start9/{}/", manifest.id))
.map(|s| s.to_owned())
}) {
new_manifest.images.insert(image.parse()?);
let sqfs_path = images_dir.join(&image).with_extension("squashfs");
let image_name = format!("start9/{}/{}:{}", manifest.id, image, manifest.version);
format!("start9/{}/{}:{}", manifest.id, image, manifest.version)
};
let id = String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("create")
@@ -323,6 +288,7 @@ impl S9pk<Section<MultiCursorFile>> {
Ok(S9pk::deserialize(
&MultiCursorFile::from(File::open(destination.as_ref()).await?),
None,
false,
)
.await?)
@@ -337,8 +303,7 @@ impl From<ManifestV1> for Manifest {
title: value.title,
version: value.version,
release_notes: value.release_notes,
license: value.license,
replaces: value.replaces,
license: value.license.into(),
wrapper_repo: value.wrapper_repo,
upstream_repo: value.upstream_repo,
support_site: value.support_site.unwrap_or_else(|| default_url.clone()),

View File

@@ -2,6 +2,7 @@ use std::collections::{BTreeMap, BTreeSet};
use color_eyre::eyre::eyre;
use helpers::const_true;
use imbl_value::InternedString;
pub use models::PackageId;
use models::{ImageId, VolumeId};
use serde::{Deserialize, Serialize};
@@ -10,12 +11,12 @@ use url::Url;
use crate::dependencies::Dependencies;
use crate::prelude::*;
use crate::s9pk::v1::git_hash::GitHash;
use crate::s9pk::git_hash::GitHash;
use crate::util::serde::Regex;
use crate::util::Version;
use crate::util::VersionString;
use crate::version::{Current, VersionT};
fn current_version() -> Version {
fn current_version() -> VersionString {
Current::new().semver().into()
}
@@ -26,12 +27,10 @@ fn current_version() -> Version {
pub struct Manifest {
pub id: PackageId,
pub title: String,
#[ts(type = "string")]
pub version: Version,
pub version: VersionString,
pub release_notes: String,
pub license: String, // type of license
#[serde(default)]
pub replaces: Vec<String>,
#[ts(type = "string")]
pub license: InternedString, // type of license
#[ts(type = "string")]
pub wrapper_repo: Url,
#[ts(type = "string")]
@@ -56,8 +55,7 @@ pub struct Manifest {
#[ts(type = "string | null")]
pub git_hash: Option<GitHash>,
#[serde(default = "current_version")]
#[ts(type = "string")]
pub os_version: Version,
pub os_version: VersionString,
#[serde(default = "const_true")]
pub has_config: bool,
}
@@ -68,9 +66,11 @@ pub struct Manifest {
pub struct HardwareRequirements {
#[serde(default)]
#[ts(type = "{ [key: string]: string }")]
device: BTreeMap<String, Regex>,
ram: Option<u64>,
pub arch: Option<Vec<String>>,
pub device: BTreeMap<String, Regex>,
#[ts(type = "number | null")]
pub ram: Option<u64>,
#[ts(type = "string[] | null")]
pub arch: Option<BTreeSet<InternedString>>,
}
#[derive(Clone, Debug, Deserialize, Serialize, TS)]

View File

@@ -7,6 +7,7 @@ use models::{mime, DataUrl, PackageId};
use tokio::fs::File;
use crate::prelude::*;
use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::file_contents::FileContents;
use crate::s9pk::merkle_archive::sink::Sink;
@@ -96,7 +97,7 @@ impl<S> S9pk<S> {
}
}
impl<S: FileSource> S9pk<S> {
impl<S: FileSource + Clone> S9pk<S> {
pub async fn new(archive: MerkleArchive<S>, size: Option<u64>) -> Result<Self, Error> {
let manifest = extract_manifest(&archive).await?;
Ok(Self {
@@ -173,9 +174,13 @@ impl<S: FileSource> S9pk<S> {
}
}
impl<S: ArchiveSource> S9pk<Section<S>> {
impl<S: ArchiveSource + Clone> S9pk<Section<S>> {
#[instrument(skip_all)]
pub async fn deserialize(source: &S, apply_filter: bool) -> Result<Self, Error> {
pub async fn deserialize(
source: &S,
commitment: Option<&MerkleArchiveCommitment>,
apply_filter: bool,
) -> Result<Self, Error> {
use tokio::io::AsyncReadExt;
let mut header = source
@@ -193,7 +198,8 @@ impl<S: ArchiveSource> S9pk<Section<S>> {
"Invalid Magic or Unexpected Version"
);
let mut archive = MerkleArchive::deserialize(source, SIG_CONTEXT, &mut header).await?;
let mut archive =
MerkleArchive::deserialize(source, SIG_CONTEXT, &mut header, commitment).await?;
if apply_filter {
archive.filter(filter)?;
@@ -211,7 +217,7 @@ impl<S: ArchiveSource> S9pk<Section<S>> {
}
impl S9pk {
pub async fn from_file(file: File, apply_filter: bool) -> Result<Self, Error> {
Self::deserialize(&MultiCursorFile::from(file), apply_filter).await
Self::deserialize(&MultiCursorFile::from(file), None, apply_filter).await
}
pub async fn open(
path: impl AsRef<Path>,