Feature/registry package index (#2623)

* include system images in compat s9pk

* wip

* wip

* update types

* wip

* fix signature serialization

* Add SignatureHeader conversions

* finish display impl for get

---------

Co-authored-by: Shadowy Super Coder <musashidisciple@proton.me>
This commit is contained in:
Aiden McClelland
2024-05-31 12:13:23 -06:00
committed by GitHub
parent 0ccbb52c1f
commit fd7c2fbe93
113 changed files with 3265 additions and 1436 deletions

View File

@@ -1,4 +1,4 @@
use std::collections::VecDeque;
use std::collections::{BTreeSet, VecDeque};
use std::future::Future;
use std::io::Cursor;
use std::os::unix::prelude::MetadataExt;
@@ -19,7 +19,7 @@ use tokio::io::{
duplex, AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, DuplexStream, ReadBuf, WriteHalf,
};
use tokio::net::TcpStream;
use tokio::sync::Notify;
use tokio::sync::{Notify, OwnedMutexGuard};
use tokio::time::{Instant, Sleep};
use crate::prelude::*;
@@ -804,7 +804,7 @@ pub struct TeeWriter<W1, W2> {
#[pin]
writer2: W2,
}
impl<W1: AsyncWrite, W2: AsyncWrite> TeeWriter<W1, W2> {
impl<W1, W2> TeeWriter<W1, W2> {
pub fn new(writer1: W1, writer2: W2, capacity: usize) -> Self {
Self {
capacity,
@@ -815,7 +815,6 @@ impl<W1: AsyncWrite, W2: AsyncWrite> TeeWriter<W1, W2> {
}
}
}
impl<W1: AsyncWrite + Unpin, W2: AsyncWrite + Unpin> TeeWriter<W1, W2> {
pub async fn into_inner(mut self) -> Result<(W1, W2), Error> {
self.flush().await?;
@@ -1007,3 +1006,114 @@ impl AsyncWrite for ParallelBlake3Writer {
Poll::Pending
}
}
/// I/O adapter that counts how many bytes have passed through the inner
/// reader/writer, starting from a caller-supplied offset.
#[pin_project::pin_project]
pub struct TrackingIO<T> {
    position: u64,
    #[pin]
    io: T,
}
impl<T> TrackingIO<T> {
    /// Wrap `io`, initializing the byte counter to `start`.
    pub fn new(start: u64, io: T) -> Self {
        Self { position: start, io }
    }

    /// Current offset: `start` plus every byte read or written so far.
    pub fn position(&self) -> u64 {
        self.position
    }

    /// Unwrap the inner value, discarding the position counter.
    pub fn into_inner(self) -> T {
        self.io
    }
}
/// Writing through a `TrackingIO` advances `position` by the number of
/// bytes the inner writer actually accepted.
impl<W: AsyncWrite> AsyncWrite for TrackingIO<W> {
    fn poll_write(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &[u8],
    ) -> std::task::Poll<Result<usize, std::io::Error>> {
        let this = self.project();
        // Only count bytes on a successful ready write; errors and
        // pending polls leave the position untouched.
        match this.io.poll_write(cx, buf) {
            Poll::Ready(Ok(n)) => {
                *this.position += n as u64;
                Poll::Ready(Ok(n))
            }
            Poll::Ready(Err(e)) => Poll::Ready(Err(e)),
            Poll::Pending => Poll::Pending,
        }
    }
    // Flush and shutdown move no payload bytes, so they are forwarded as-is.
    fn poll_flush(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Result<(), std::io::Error>> {
        self.project().io.poll_flush(cx)
    }
    fn poll_shutdown(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Result<(), std::io::Error>> {
        self.project().io.poll_shutdown(cx)
    }
}
/// Reading through a `TrackingIO` advances `position` by the number of
/// bytes the inner reader appended to `buf`.
impl<R: AsyncRead> AsyncRead for TrackingIO<R> {
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        let this = self.project();
        // `poll_read` reports progress through the buffer rather than a byte
        // count, so measure the growth of the filled region around the poll.
        let start = buf.filled().len();
        futures::ready!(this.io.poll_read(cx, buf)?);
        *this.position += (buf.filled().len() - start) as u64;
        Poll::Ready(Ok(()))
    }
}
/// Equality and ordering for `TrackingIO` are defined purely by the stream
/// position; the wrapped `io` value is ignored. Together with the
/// `Borrow<u64>` impl below, this allows a `TrackingIO` stored in an ordered
/// collection (e.g. a `BTreeSet`) to be looked up by position alone.
impl<T> std::cmp::PartialEq for TrackingIO<T> {
    fn eq(&self, other: &Self) -> bool {
        self.position == other.position
    }
}
impl<T> std::cmp::Eq for TrackingIO<T> {}
impl<T> std::cmp::PartialOrd for TrackingIO<T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        // Delegate to `cmp` so `PartialOrd` can never disagree with `Ord`
        // (clippy: non_canonical_partial_ord_impl).
        Some(self.cmp(other))
    }
}
impl<T> std::cmp::Ord for TrackingIO<T> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.position.cmp(&other.position)
    }
}
impl<T> std::borrow::Borrow<u64> for TrackingIO<T> {
    fn borrow(&self) -> &u64 {
        &self.position
    }
}
/// Adapter exposing `AsyncRead`/`AsyncWrite` on an owned tokio mutex guard,
/// so that I/O can be performed on a shared reader/writer while the lock is
/// held for the lifetime of this value.
pub struct MutexIO<T>(OwnedMutexGuard<T>);
impl<R: AsyncRead + Unpin> AsyncRead for MutexIO<R> {
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        // `R: Unpin`, so re-pinning the value behind the guard is sound.
        Pin::new(&mut *self.get_mut().0).poll_read(cx, buf)
    }
}
/// All write operations delegate straight to the guarded writer.
impl<W: AsyncWrite + Unpin> AsyncWrite for MutexIO<W> {
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &[u8],
    ) -> Poll<Result<usize, std::io::Error>> {
        Pin::new(&mut *self.get_mut().0).poll_write(cx, buf)
    }
    fn poll_flush(
        self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<Result<(), std::io::Error>> {
        Pin::new(&mut *self.get_mut().0).poll_flush(cx)
    }
    fn poll_shutdown(
        self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<Result<(), std::io::Error>> {
        Pin::new(&mut *self.get_mut().0).poll_shutdown(cx)
    }
}

View File

@@ -1,12 +1,13 @@
use models::{Error, ResultExt};
use serde::{Deserialize, Serialize};
use tokio::process::Command;
use ts_rs::TS;
use crate::util::Invoke;
const KNOWN_CLASSES: &[&str] = &["processor", "display"];
#[derive(Debug, Deserialize, Serialize)]
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(tag = "class")]
#[serde(rename_all = "camelCase")]
pub enum LshwDevice {
@@ -28,12 +29,12 @@ impl LshwDevice {
}
}
#[derive(Debug, Deserialize, Serialize)]
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
pub struct LshwProcessor {
pub product: String,
}
#[derive(Debug, Deserialize, Serialize)]
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
pub struct LshwDisplay {
pub product: String,
}

View File

@@ -15,7 +15,7 @@ use helpers::canonicalize;
pub use helpers::NonDetachingJoinHandle;
use imbl_value::InternedString;
use lazy_static::lazy_static;
pub use models::Version;
pub use models::VersionString;
use pin_project::pin_project;
use sha2::Digest;
use tokio::fs::File;

View File

@@ -1,3 +1,4 @@
use std::path::Path;
use clap::Parser;
use rpc_toolkit::{from_fn_async, Context, ParentHandler};
@@ -9,9 +10,11 @@ use crate::context::CliContext;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource};
use crate::s9pk::merkle_archive::source::ArchiveSource;
use crate::util::io::ParallelBlake3Writer;
use crate::util::serde::Base16;
use crate::util::Apply;
use crate::CAP_10_MiB;
pub fn util<C: Context>() -> ParentHandler<C> {
ParentHandler::new().subcommand("b3sum", from_fn_async(b3sum))
@@ -28,26 +31,29 @@ pub async fn b3sum(
ctx: CliContext,
B3sumParams { file, allow_mmap }: B3sumParams,
) -> Result<Base16<[u8; 32]>, Error> {
let source = if let Ok(url) = file.parse::<Url>() {
async fn b3sum_source<S: ArchiveSource>(source: S) -> Result<Base16<[u8; 32]>, Error> {
let mut hasher = ParallelBlake3Writer::new(CAP_10_MiB);
source.copy_all_to(&mut hasher).await?;
hasher.finalize().await.map(|h| *h.as_bytes()).map(Base16)
}
async fn b3sum_file(
path: impl AsRef<Path>,
allow_mmap: bool,
) -> Result<Base16<[u8; 32]>, Error> {
let file = MultiCursorFile::from(File::open(path).await?);
if allow_mmap {
return file.blake3_mmap().await.map(|h| *h.as_bytes()).map(Base16);
}
b3sum_source(file).await
}
if let Ok(url) = file.parse::<Url>() {
if url.scheme() == "file" {
let file = MultiCursorFile::from(File::open(url.path()).await?);
if allow_mmap {
return file.blake3_mmap().await.map(|h| *h.as_bytes()).map(Base16);
}
DynFileSource::new(file.section(
0,
file.size().await.ok_or_else(|| {
Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem)
})?,
))
b3sum_file(url.path(), allow_mmap).await
} else if url.scheme() == "http" || url.scheme() == "https" {
let file = HttpSource::new(ctx.client.clone(), url).await?;
DynFileSource::new(file.section(
0,
file.size().await.ok_or_else(|| {
Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem)
})?,
))
HttpSource::new(ctx.client.clone(), url)
.await?
.apply(b3sum_source)
.await
} else {
return Err(Error::new(
eyre!("unknown scheme: {}", url.scheme()),
@@ -55,18 +61,6 @@ pub async fn b3sum(
));
}
} else {
let file = MultiCursorFile::from(File::open(file).await?);
if allow_mmap {
return file.blake3_mmap().await.map(|h| *h.as_bytes()).map(Base16);
}
DynFileSource::new(file.section(
0,
file.size().await.ok_or_else(|| {
Error::new(eyre!("failed to get file size"), ErrorKind::Filesystem)
})?,
))
};
let mut hasher = ParallelBlake3Writer::new(crate::s9pk::merkle_archive::hash::BUFFER_CAPACITY);
source.copy(&mut hasher).await?;
hasher.finalize().await.map(|h| *h.as_bytes()).map(Base16)
b3sum_file(file, allow_mmap).await
}
}

View File

@@ -22,6 +22,7 @@ use ts_rs::TS;
use super::IntoDoubleEndedIterator;
use crate::prelude::*;
use crate::util::Apply;
use crate::util::clap::FromStrParser;
pub fn deserialize_from_str<
@@ -999,6 +1000,11 @@ impl<T: AsRef<[u8]>> std::fmt::Display for Base16<T> {
#[derive(TS)]
#[ts(type = "string", concrete(T = Vec<u8>))]
pub struct Base32<T>(pub T);
/// Render the inner bytes as padded RFC 4648 base32.
impl<T: AsRef<[u8]>> std::fmt::Display for Base32<T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let encoded = base32::encode(base32::Alphabet::RFC4648 { padding: true }, self.0.as_ref());
        // Forward through `Display::fmt` on the encoded string so the
        // caller's width/alignment flags still apply.
        std::fmt::Display::fmt(&encoded, f)
    }
}
impl<'de, T: TryFrom<Vec<u8>>> Deserialize<'de> for Base32<T> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
@@ -1022,32 +1028,35 @@ impl<T: AsRef<[u8]>> Serialize for Base32<T> {
where
S: Serializer,
{
serializer.serialize_str(&base32::encode(
base32::Alphabet::RFC4648 { padding: true },
self.0.as_ref(),
))
}
}
impl<T: AsRef<[u8]>> std::fmt::Display for Base32<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
base32::encode(base32::Alphabet::RFC4648 { padding: true }, self.0.as_ref()).fmt(f)
serialize_display(self, serializer)
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, TS)]
#[ts(type = "string", concrete(T = Vec<u8>))]
pub struct Base64<T>(pub T);
impl<T: AsRef<[u8]>> std::fmt::Display for Base64<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&base64::encode(self.0.as_ref()))
}
}
impl<T: TryFrom<Vec<u8>>> FromStr for Base64<T>
{
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
base64::decode(&s)
.with_kind(ErrorKind::Deserialization)?
.apply(TryFrom::try_from)
.map(Self)
.map_err(|_| Error::new(eyre!("failed to create from buffer"), ErrorKind::Deserialization))
}
}
impl<'de, T: TryFrom<Vec<u8>>> Deserialize<'de> for Base64<T> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
base64::decode(&s)
.map_err(serde::de::Error::custom)?
.try_into()
.map_err(|_| serde::de::Error::custom("invalid length"))
.map(Self)
deserialize_from_str(deserializer)
}
}
impl<T: AsRef<[u8]>> Serialize for Base64<T> {
@@ -1055,7 +1064,7 @@ impl<T: AsRef<[u8]>> Serialize for Base64<T> {
where
S: Serializer,
{
serializer.serialize_str(&base64::encode(self.0.as_ref()))
serialize_display(self, serializer)
}
}
impl<T> Deref for Base64<T> {