Feature/UI sideload (#2658)

* ui sideloading

* remove subtlecrypto import

* fix parser

* misc fixes

* allow docker pull during compat conversion
This commit is contained in:
Aiden McClelland
2024-06-28 15:03:01 -06:00
committed by GitHub
parent c16d8a1da1
commit 822dd5e100
101 changed files with 1901 additions and 797 deletions

View File

@@ -274,6 +274,21 @@ impl<S: FileSource + Clone> DirectoryContents<S> {
((_, a), (_, b), _) if !a.as_contents().is_dir() && b.as_contents().is_dir() => {
std::cmp::Ordering::Greater
}
((_, a), (_, b), _)
if a.as_contents().is_missing() && !b.as_contents().is_missing() =>
{
std::cmp::Ordering::Greater
}
((_, a), (_, b), _)
if !a.as_contents().is_missing() && b.as_contents().is_missing() =>
{
std::cmp::Ordering::Less
}
((n_a, a), (n_b, b), _)
if a.as_contents().is_missing() && b.as_contents().is_missing() =>
{
n_a.cmp(n_b)
}
((a, _), (b, _), Some(sort_by)) => sort_by(&***a, &***b),
_ => std::cmp::Ordering::Equal,
}) {

View File

@@ -121,14 +121,14 @@ impl<S: ArchiveSource + Clone> MerkleArchive<Section<S>> {
}
if max_size > *root_maxsize {
return Err(Error::new(
eyre!("merkle root directory max size too large"),
eyre!("root directory max size too large"),
ErrorKind::InvalidSignature,
));
}
} else {
if max_size > CAP_1_MiB as u64 {
return Err(Error::new(
eyre!("merkle root directory max size over 1MiB, cancelling download in case of DOS attack"),
eyre!("root directory max size over 1MiB, cancelling download in case of DOS attack"),
ErrorKind::InvalidSignature,
));
}
@@ -377,6 +377,9 @@ impl<S> EntryContents<S> {
/// Returns `true` when this entry is a directory.
pub fn is_dir(&self) -> bool {
match self {
EntryContents::Directory(_) => true,
_ => false,
}
}
/// Returns `true` when this entry's contents are absent (`Missing`).
pub fn is_missing(&self) -> bool {
match self {
EntryContents::Missing => true,
_ => false,
}
}
}
impl<S: ArchiveSource + Clone> EntryContents<Section<S>> {
#[instrument(skip_all)]

View File

@@ -4,7 +4,7 @@ use std::sync::{Arc, Mutex};
use std::task::Poll;
use bytes::Bytes;
use futures::{Stream, StreamExt, TryStreamExt};
use futures::{Stream, TryStreamExt};
use reqwest::header::{ACCEPT_RANGES, CONTENT_LENGTH, RANGE};
use reqwest::{Client, Url};
use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take};
@@ -54,11 +54,12 @@ impl HttpSource {
}
}
impl ArchiveSource for HttpSource {
type Reader = HttpReader;
type FetchReader = HttpReader;
type FetchAllReader = StreamReader<BoxStream<'static, Result<Bytes, std::io::Error>>, Bytes>;
async fn size(&self) -> Option<u64> {
self.size
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
Ok(StreamReader::new(
self.client
.get(self.url.clone())
@@ -72,7 +73,7 @@ impl ArchiveSource for HttpSource {
.apply(boxed),
))
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
match &self.range_support {
Ok(_) => Ok(HttpReader::Range(
StreamReader::new(if size > 0 {

View File

@@ -10,6 +10,7 @@ use tokio::io::{AsyncRead, AsyncWrite};
use crate::prelude::*;
use crate::s9pk::merkle_archive::hash::VerifyingWriter;
use crate::util::io::{open_file, TmpDir};
pub mod http;
pub mod multi_cursor_file;
@@ -159,7 +160,7 @@ impl FileSource for PathBuf {
Ok(tokio::fs::metadata(self).await?.len())
}
async fn reader(&self) -> Result<Self::Reader, Error> {
Ok(File::open(self).await?)
Ok(open_file(self).await?)
}
}
@@ -180,18 +181,17 @@ impl FileSource for Arc<[u8]> {
}
pub trait ArchiveSource: Send + Sync + Sized + 'static {
type Reader: AsyncRead + Unpin + Send;
type FetchReader: AsyncRead + Unpin + Send;
type FetchAllReader: AsyncRead + Unpin + Send;
fn size(&self) -> impl Future<Output = Option<u64>> + Send {
async { None }
}
fn fetch_all(
&self,
) -> impl Future<Output = Result<impl AsyncRead + Unpin + Send, Error>> + Send;
fn fetch_all(&self) -> impl Future<Output = Result<Self::FetchAllReader, Error>> + Send;
fn fetch(
&self,
position: u64,
size: u64,
) -> impl Future<Output = Result<Self::Reader, Error>> + Send;
) -> impl Future<Output = Result<Self::FetchReader, Error>> + Send;
fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
@@ -222,14 +222,15 @@ pub trait ArchiveSource: Send + Sync + Sized + 'static {
}
impl<T: ArchiveSource> ArchiveSource for Arc<T> {
type Reader = T::Reader;
type FetchReader = T::FetchReader;
type FetchAllReader = T::FetchAllReader;
async fn size(&self) -> Option<u64> {
self.deref().size().await
}
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
self.deref().fetch_all().await
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
self.deref().fetch(position, size).await
}
async fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
@@ -249,11 +250,12 @@ impl<T: ArchiveSource> ArchiveSource for Arc<T> {
}
impl ArchiveSource for Arc<[u8]> {
type Reader = tokio::io::Take<std::io::Cursor<Self>>;
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
type FetchReader = tokio::io::Take<std::io::Cursor<Self>>;
type FetchAllReader = std::io::Cursor<Self>;
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
Ok(std::io::Cursor::new(self.clone()))
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
use tokio::io::AsyncReadExt;
let mut cur = std::io::Cursor::new(self.clone());
@@ -269,7 +271,7 @@ pub struct Section<S> {
size: u64,
}
impl<S: ArchiveSource> FileSource for Section<S> {
type Reader = S::Reader;
type Reader = S::FetchReader;
async fn size(&self) -> Result<u64, Error> {
Ok(self.size)
}
@@ -285,3 +287,81 @@ pub type DynRead = Box<dyn AsyncRead + Unpin + Send + Sync + 'static>;
pub fn into_dyn_read<R: AsyncRead + Unpin + Send + Sync + 'static>(r: R) -> DynRead {
Box::new(r)
}
/// Pairs a source `S` with a shared temporary directory so that the
/// directory's lifetime is tied to users of the source.
/// Cloning is shallow for the directory handle (`Arc`); `S` must be `Clone`.
#[derive(Clone)]
pub struct TmpSource<S> {
// Shared handle to the temp dir backing this source.
tmp_dir: Arc<TmpDir>,
// The wrapped data source.
source: S,
}
impl<S> TmpSource<S> {
/// Wraps `source`, associating it with the shared temp dir `tmp_dir`.
pub fn new(tmp_dir: Arc<TmpDir>, source: S) -> Self {
Self { tmp_dir, source }
}
/// Consumes `self` and garbage-collects the temp dir
/// (delegates to `TmpDir::gc`).
pub async fn gc(self) -> Result<(), Error> {
self.tmp_dir.gc().await
}
}
// Transparent read access to the wrapped source.
impl<S> std::ops::Deref for TmpSource<S> {
type Target = S;
fn deref(&self) -> &Self::Target {
&self.source
}
}
// `ArchiveSource` passthrough: every method delegates to the inner source
// unchanged; the wrapper only keeps the temp dir alive alongside it.
impl<S: ArchiveSource> ArchiveSource for TmpSource<S> {
type FetchReader = <S as ArchiveSource>::FetchReader;
type FetchAllReader = <S as ArchiveSource>::FetchAllReader;
async fn size(&self) -> Option<u64> {
self.source.size().await
}
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
self.source.fetch_all().await
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
self.source.fetch(position, size).await
}
async fn copy_all_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
w: &mut W,
) -> Result<(), Error> {
self.source.copy_all_to(w).await
}
async fn copy_to<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
position: u64,
size: u64,
w: &mut W,
) -> Result<(), Error> {
self.source.copy_to(position, size, w).await
}
}
// Allows a `TmpSource<S>` to be type-erased into a `DynFileSource`.
impl<S: FileSource> From<TmpSource<S>> for DynFileSource {
fn from(value: TmpSource<S>) -> Self {
DynFileSource::new(value)
}
}
// `FileSource` passthrough: all methods delegate to the inner source.
impl<S: FileSource> FileSource for TmpSource<S> {
type Reader = <S as FileSource>::Reader;
async fn size(&self) -> Result<u64, Error> {
self.source.size().await
}
async fn reader(&self) -> Result<Self::Reader, Error> {
self.source.reader().await
}
async fn copy<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
mut w: &mut W,
) -> Result<(), Error> {
// Reborrow so the caller's writer is not moved into the delegate.
self.source.copy(&mut w).await
}
async fn copy_verify<W: AsyncWrite + Unpin + Send + ?Sized>(
&self,
mut w: &mut W,
verify: Option<(Hash, u64)>,
) -> Result<(), Error> {
self.source.copy_verify(&mut w, verify).await
}
async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result<Vec<u8>, Error> {
self.source.to_vec(verify).await
}
}

View File

@@ -6,12 +6,13 @@ use std::sync::Arc;
use std::task::Poll;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take};
use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, ReadBuf, Take};
use tokio::sync::{Mutex, OwnedMutexGuard};
use crate::disk::mount::filesystem::loop_dev::LoopDev;
use crate::prelude::*;
use crate::s9pk::merkle_archive::source::{ArchiveSource, Section};
use crate::util::io::open_file;
fn path_from_fd(fd: RawFd) -> Result<PathBuf, Error> {
#[cfg(target_os = "linux")]
@@ -42,7 +43,7 @@ impl MultiCursorFile {
path_from_fd(self.fd)
}
pub async fn open(fd: &impl AsRawFd) -> Result<Self, Error> {
let f = File::open(path_from_fd(fd.as_raw_fd())?).await?;
let f = open_file(path_from_fd(fd.as_raw_fd())?).await?;
Ok(Self::from(f))
}
pub async fn cursor(&self) -> Result<FileCursor, Error> {
@@ -50,7 +51,7 @@ impl MultiCursorFile {
if let Ok(file) = self.file.clone().try_lock_owned() {
file
} else {
Arc::new(Mutex::new(File::open(self.path()?).await?))
Arc::new(Mutex::new(open_file(self.path()?).await?))
.try_lock_owned()
.expect("freshly created")
},
@@ -88,24 +89,48 @@ impl AsyncRead for FileCursor {
Pin::new(&mut (&mut **this.0.get_mut())).poll_read(cx, buf)
}
}
// Seeking support for `FileCursor`: both methods forward to the inner file
// through the pin projection (`self.project()` — presumably generated by a
// `#[pin_project]` attribute on `FileCursor`; not visible here, confirm).
impl AsyncSeek for FileCursor {
fn start_seek(self: Pin<&mut Self>, position: SeekFrom) -> std::io::Result<()> {
let this = self.project();
Pin::new(&mut (&mut **this.0.get_mut())).start_seek(position)
}
fn poll_complete(
self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<std::io::Result<u64>> {
let this = self.project();
Pin::new(&mut (&mut **this.0.get_mut())).poll_complete(cx)
}
}
// Shared access to the underlying `File` held by the cursor.
impl std::ops::Deref for FileCursor {
type Target = File;
fn deref(&self) -> &Self::Target {
&*self.0
}
}
// Mutable access to the underlying `File` held by the cursor.
impl std::ops::DerefMut for FileCursor {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut *self.0
}
}
impl ArchiveSource for MultiCursorFile {
type Reader = Take<FileCursor>;
type FetchReader = Take<FileCursor>;
type FetchAllReader = FileCursor;
async fn size(&self) -> Option<u64> {
tokio::fs::metadata(self.path().ok()?)
.await
.ok()
.map(|m| m.len())
}
#[allow(refining_impl_trait)]
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send + 'static, Error> {
async fn fetch_all(&self) -> Result<Self::FetchAllReader, Error> {
use tokio::io::AsyncSeekExt;
let mut file = self.cursor().await?;
file.0.seek(SeekFrom::Start(0)).await?;
Ok(file)
}
async fn fetch(&self, position: u64, size: u64) -> Result<Self::Reader, Error> {
async fn fetch(&self, position: u64, size: u64) -> Result<Self::FetchReader, Error> {
use tokio::io::AsyncSeekExt;
let mut file = self.cursor().await?;

View File

@@ -3,7 +3,7 @@ use tokio::io::{AsyncRead, AsyncWrite};
use crate::prelude::*;
/// Most-significant byte, == 0x80
/// Most-significant bit, == 0x80
pub const MSB: u8 = 0b1000_0000;
const MAX_STR_LEN: u64 = 1024 * 1024; // 1 MiB
@@ -39,22 +39,20 @@ pub async fn serialize_varstring<W: AsyncWrite + Unpin + Send>(
Ok(())
}
const MAX_SIZE: usize = (std::mem::size_of::<u64>() * 8 + 7) / 7;
#[derive(Default)]
struct VarIntProcessor {
buf: [u8; 10],
maxsize: usize,
buf: [u8; MAX_SIZE],
i: usize,
}
impl VarIntProcessor {
fn new() -> VarIntProcessor {
VarIntProcessor {
maxsize: (std::mem::size_of::<u64>() * 8 + 7) / 7,
..VarIntProcessor::default()
}
Self::default()
}
fn push(&mut self, b: u8) -> Result<(), Error> {
if self.i >= self.maxsize {
if self.i >= MAX_SIZE {
return Err(Error::new(
eyre!("Unterminated varint"),
ErrorKind::ParseS9pk,

View File

@@ -4,37 +4,57 @@ pub mod rpc;
pub mod v1;
pub mod v2;
use std::io::SeekFrom;
use std::path::Path;
use std::sync::Arc;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncSeekExt};
use tokio::io::{AsyncReadExt, AsyncSeek};
pub use v2::{manifest, S9pk};
use crate::context::CliContext;
use crate::prelude::*;
use crate::progress::FullProgressTracker;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource};
use crate::s9pk::v1::reader::S9pkReader;
use crate::s9pk::v2::compat::MAGIC_AND_VERSION;
use crate::util::io::TmpDir;
pub async fn load(ctx: &CliContext, path: impl AsRef<Path>) -> Result<File, Error> {
pub async fn load<S, K>(
source: S,
key: K,
progress: Option<&FullProgressTracker>,
) -> Result<S9pk<DynFileSource>, Error>
where
S: ArchiveSource,
S::FetchAllReader: AsyncSeek + Sync,
K: FnOnce() -> Result<ed25519_dalek::SigningKey, Error>,
{
// TODO: return s9pk
const MAGIC_LEN: usize = MAGIC_AND_VERSION.len();
let mut magic = [0_u8; MAGIC_LEN];
let mut file = tokio::fs::File::open(&path).await?;
file.read_exact(&mut magic).await?;
file.seek(SeekFrom::Start(0)).await?;
source.fetch(0, 3).await?.read_exact(&mut magic).await?;
if magic == v2::compat::MAGIC_AND_VERSION {
let phase = if let Some(progress) = progress {
let mut phase = progress.add_phase(
"Converting Package to V2".into(),
Some(source.size().await.unwrap_or(60)),
);
phase.start();
Some(phase)
} else {
None
};
tracing::info!("Converting package to v2 s9pk");
let new_path = path.as_ref().with_extension("compat.s9pk");
S9pk::from_v1(
S9pkReader::from_reader(file, true).await?,
&new_path,
ctx.developer_key()?.clone(),
let tmp_dir = TmpDir::new().await?;
let s9pk = S9pk::from_v1(
S9pkReader::from_reader(source.fetch_all().await?, true).await?,
Arc::new(tmp_dir),
key()?,
)
.await?;
tokio::fs::rename(&new_path, &path).await?;
file = tokio::fs::File::open(&path).await?;
tracing::info!("Converted s9pk successfully");
if let Some(mut phase) = phase {
phase.complete();
}
Ok(s9pk.into_dyn())
} else {
Ok(S9pk::deserialize(&Arc::new(source), None).await?.into_dyn())
}
Ok(file)
}

View File

@@ -1,19 +1,19 @@
use std::path::PathBuf;
use std::sync::Arc;
use clap::Parser;
use models::ImageId;
use rpc_toolkit::{from_fn_async, Empty, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use ts_rs::TS;
use crate::context::CliContext;
use crate::prelude::*;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::v2::pack::ImageConfig;
use crate::s9pk::v2::SIG_CONTEXT;
use crate::s9pk::S9pk;
use crate::util::io::TmpDir;
use crate::util::io::{create_file, open_file, TmpDir};
use crate::util::serde::{apply_expr, HandlerExtSerde};
pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"];
@@ -79,19 +79,25 @@ async fn add_image(
AddImageParams { id, config }: AddImageParams,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<(), Error> {
let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?)
.await?
.into_dyn();
let mut s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
s9pk.as_manifest_mut().images.insert(id, config);
let tmpdir = TmpDir::new().await?;
s9pk.load_images(&tmpdir).await?;
let tmp_dir = Arc::new(TmpDir::new().await?);
s9pk.load_images(tmp_dir.clone()).await?;
s9pk.validate_and_filter(None)?;
let tmp_path = s9pk_path.with_extension("s9pk.tmp");
let mut tmp_file = File::create(&tmp_path).await?;
let mut tmp_file = create_file(&tmp_path).await?;
s9pk.serialize(&mut tmp_file, true).await?;
drop(s9pk);
tmp_file.sync_all().await?;
tokio::fs::rename(&tmp_path, &s9pk_path).await?;
tmp_dir.gc().await?;
Ok(())
}
@@ -104,13 +110,18 @@ async fn edit_manifest(
EditManifestParams { expression }: EditManifestParams,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<Manifest, Error> {
let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?).await?;
let mut s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
let old = serde_json::to_value(s9pk.as_manifest()).with_kind(ErrorKind::Serialization)?;
*s9pk.as_manifest_mut() = serde_json::from_value(apply_expr(old.into(), &expression)?.into())
.with_kind(ErrorKind::Serialization)?;
let manifest = s9pk.as_manifest().clone();
let tmp_path = s9pk_path.with_extension("s9pk.tmp");
let mut tmp_file = File::create(&tmp_path).await?;
let mut tmp_file = create_file(&tmp_path).await?;
s9pk.as_archive_mut()
.set_signer(ctx.developer_key()?.clone(), SIG_CONTEXT);
s9pk.serialize(&mut tmp_file, true).await?;
@@ -123,9 +134,14 @@ async fn edit_manifest(
async fn file_tree(
ctx: CliContext,
_: Empty,
S9pkPath { s9pk }: S9pkPath,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<Vec<PathBuf>, Error> {
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
let s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
Ok(s9pk.as_archive().contents().file_paths(""))
}
@@ -138,11 +154,16 @@ struct CatParams {
async fn cat(
ctx: CliContext,
CatParams { file_path }: CatParams,
S9pkPath { s9pk }: S9pkPath,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<(), Error> {
use crate::s9pk::merkle_archive::source::FileSource;
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
let s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
tokio::io::copy(
&mut s9pk
.as_archive()
@@ -162,8 +183,13 @@ async fn cat(
async fn inspect_manifest(
ctx: CliContext,
_: Empty,
S9pkPath { s9pk }: S9pkPath,
S9pkPath { s9pk: s9pk_path }: S9pkPath,
) -> Result<Manifest, Error> {
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
let s9pk = super::load(
MultiCursorFile::from(open_file(&s9pk_path).await?),
|| ctx.developer_key().cloned(),
None,
)
.await?;
Ok(s9pk.as_manifest().clone())
}

View File

@@ -1,8 +1,7 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use emver::VersionRange;
use imbl_value::InOMap;
use exver::{Version, VersionRange};
use indexmap::IndexMap;
pub use models::PackageId;
use models::{ActionId, HealthCheckId, ImageId, VolumeId};
@@ -13,23 +12,16 @@ use crate::prelude::*;
use crate::s9pk::git_hash::GitHash;
use crate::s9pk::manifest::{Alerts, Description, HardwareRequirements};
use crate::util::serde::{Duration, IoFormat};
use crate::util::VersionString;
use crate::version::{Current, VersionT};
fn current_version() -> VersionString {
Current::new().semver().into()
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct Manifest {
#[serde(default = "current_version")]
pub eos_version: VersionString,
pub eos_version: Version,
pub id: PackageId,
#[serde(default)]
pub git_hash: Option<GitHash>,
pub title: String,
pub version: VersionString,
pub version: exver::emver::Version,
pub description: Description,
#[serde(default)]
pub assets: Assets,

View File

@@ -20,6 +20,7 @@ use super::header::{FileSection, Header, TableOfContents};
use super::SIG_CONTEXT;
use crate::prelude::*;
use crate::s9pk::v1::docker::DockerReader;
use crate::util::io::open_file;
use crate::util::VersionString;
#[pin_project::pin_project]
@@ -150,9 +151,7 @@ pub struct S9pkReader<R: AsyncRead + AsyncSeek + Unpin + Send + Sync = BufReader
impl S9pkReader {
pub async fn open<P: AsRef<Path>>(path: P, check_sig: bool) -> Result<Self, Error> {
let p = path.as_ref();
let rdr = File::open(p)
.await
.with_ctx(|_| (crate::error::ErrorKind::Filesystem, p.display().to_string()))?;
let rdr = open_file(p).await?;
Self::from_reader(BufReader::new(rdr), check_sig).await
}

View File

@@ -2,9 +2,8 @@ use std::collections::BTreeMap;
use std::path::Path;
use std::sync::Arc;
use itertools::Itertools;
use exver::ExtendedVersion;
use models::ImageId;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncSeek, AsyncWriteExt};
use tokio::process::Command;
@@ -12,29 +11,35 @@ use crate::dependencies::{DepInfo, Dependencies};
use crate::prelude::*;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::Section;
use crate::s9pk::merkle_archive::source::TmpSource;
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::rpc::SKIP_ENV;
use crate::s9pk::v1::manifest::{Manifest as ManifestV1, PackageProcedure};
use crate::s9pk::v1::reader::S9pkReader;
use crate::s9pk::v2::pack::{PackSource, CONTAINER_TOOL};
use crate::s9pk::v2::pack::{ImageSource, PackSource, CONTAINER_TOOL};
use crate::s9pk::v2::{S9pk, SIG_CONTEXT};
use crate::util::io::TmpDir;
use crate::util::io::{create_file, TmpDir};
use crate::util::Invoke;
pub const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x01];
impl S9pk<Section<MultiCursorFile>> {
impl S9pk<TmpSource<PackSource>> {
#[instrument(skip_all)]
pub async fn from_v1<R: AsyncRead + AsyncSeek + Unpin + Send + Sync>(
mut reader: S9pkReader<R>,
destination: impl AsRef<Path>,
tmp_dir: Arc<TmpDir>,
signer: ed25519_dalek::SigningKey,
) -> Result<Self, Error> {
let scratch_dir = TmpDir::new().await?;
Command::new(CONTAINER_TOOL)
.arg("run")
.arg("--rm")
.arg("--privileged")
.arg("tonistiigi/binfmt")
.arg("--install")
.arg("all")
.invoke(ErrorKind::Docker)
.await?;
let mut archive = DirectoryContents::<PackSource>::new();
let mut archive = DirectoryContents::<TmpSource<PackSource>>::new();
// manifest.json
let manifest_raw = reader.manifest().await?;
@@ -56,33 +61,35 @@ impl S9pk<Section<MultiCursorFile>> {
let license: Arc<[u8]> = reader.license().await?.to_vec().await?.into();
archive.insert_path(
"LICENSE.md",
Entry::file(PackSource::Buffered(license.into())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(license.into()),
)),
)?;
// instructions.md
let instructions: Arc<[u8]> = reader.instructions().await?.to_vec().await?.into();
archive.insert_path(
"instructions.md",
Entry::file(PackSource::Buffered(instructions.into())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(instructions.into()),
)),
)?;
// icon.md
let icon: Arc<[u8]> = reader.icon().await?.to_vec().await?.into();
archive.insert_path(
format!("icon.{}", manifest.assets.icon_type()),
Entry::file(PackSource::Buffered(icon.into())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(icon.into()),
)),
)?;
// images
for arch in reader.docker_arches().await? {
let images_dir = scratch_dir.join("images").join(&arch);
let docker_platform = if arch == "x86_64" {
"--platform=linux/amd64".to_owned()
} else if arch == "aarch64" {
"--platform=linux/arm64".to_owned()
} else {
format!("--platform=linux/{arch}")
};
let images_dir = tmp_dir.join("images").join(&arch);
tokio::fs::create_dir_all(&images_dir).await?;
Command::new(CONTAINER_TOOL)
.arg("load")
@@ -93,97 +100,24 @@ impl S9pk<Section<MultiCursorFile>> {
let mut image_config = new_manifest.images.remove(image).unwrap_or_default();
image_config.arch.insert(arch.as_str().into());
new_manifest.images.insert(image.clone(), image_config);
let sqfs_path = images_dir.join(image).with_extension("squashfs");
let image_name = if *system {
format!("start9/{}:latest", image)
} else {
format!("start9/{}/{}:{}", manifest.id, image, manifest.version)
};
let id = String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("create")
.arg(&docker_platform)
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?,
)?;
let env = String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("run")
.arg("--rm")
.arg(&docker_platform)
.arg("--entrypoint")
.arg("env")
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?,
)?
.lines()
.filter(|l| {
l.trim()
.split_once("=")
.map_or(false, |(v, _)| !SKIP_ENV.contains(&v))
})
.join("\n")
+ "\n";
let workdir = Path::new(
String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("run")
.arg("--rm")
.arg(&docker_platform)
.arg("--entrypoint")
.arg("pwd")
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?,
)?
.trim(),
)
.to_owned();
Command::new("bash")
.arg("-c")
.arg(format!(
"{CONTAINER_TOOL} export {id} | mksquashfs - {sqfs} -tar",
id = id.trim(),
sqfs = sqfs_path.display()
))
.invoke(ErrorKind::Docker)
ImageSource::DockerTag(image_name.clone())
.load(
tmp_dir.clone(),
&new_manifest.id,
&new_manifest.version,
image,
&arch,
&mut archive,
)
.await?;
Command::new(CONTAINER_TOOL)
.arg("rm")
.arg(id.trim())
.invoke(ErrorKind::Docker)
.await?;
archive.insert_path(
Path::new("images")
.join(&arch)
.join(&image)
.with_extension("squashfs"),
Entry::file(PackSource::File(sqfs_path)),
)?;
archive.insert_path(
Path::new("images")
.join(&arch)
.join(&image)
.with_extension("env"),
Entry::file(PackSource::Buffered(Vec::from(env).into())),
)?;
archive.insert_path(
Path::new("images")
.join(&arch)
.join(&image)
.with_extension("json"),
Entry::file(PackSource::Buffered(
serde_json::to_vec(&serde_json::json!({
"workdir": workdir
}))
.with_kind(ErrorKind::Serialization)?
.into(),
)),
)?;
Command::new(CONTAINER_TOOL)
.arg("rmi")
.arg("-f")
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?;
@@ -191,7 +125,7 @@ impl S9pk<Section<MultiCursorFile>> {
}
// assets
let asset_dir = scratch_dir.join("assets");
let asset_dir = tmp_dir.join("assets");
tokio::fs::create_dir_all(&asset_dir).await?;
tokio_tar::Archive::new(reader.assets().await?)
.unpack(&asset_dir)
@@ -212,21 +146,21 @@ impl S9pk<Section<MultiCursorFile>> {
Path::new("assets")
.join(&asset_id)
.with_extension("squashfs"),
Entry::file(PackSource::File(sqfs_path)),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(sqfs_path))),
)?;
}
// javascript
let js_dir = scratch_dir.join("javascript");
let js_dir = tmp_dir.join("javascript");
let sqfs_path = js_dir.with_extension("squashfs");
tokio::fs::create_dir_all(&js_dir).await?;
if let Some(mut scripts) = reader.scripts().await? {
let mut js_file = File::create(js_dir.join("embassy.js")).await?;
let mut js_file = create_file(js_dir.join("embassy.js")).await?;
tokio::io::copy(&mut scripts, &mut js_file).await?;
js_file.sync_all().await?;
}
{
let mut js_file = File::create(js_dir.join("embassyManifest.json")).await?;
let mut js_file = create_file(js_dir.join("embassyManifest.json")).await?;
js_file
.write_all(&serde_json::to_vec(&manifest_raw).with_kind(ErrorKind::Serialization)?)
.await?;
@@ -239,30 +173,24 @@ impl S9pk<Section<MultiCursorFile>> {
.await?;
archive.insert_path(
Path::new("javascript.squashfs"),
Entry::file(PackSource::File(sqfs_path)),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(sqfs_path))),
)?;
archive.insert_path(
"manifest.json",
Entry::file(PackSource::Buffered(
serde_json::to_vec::<Manifest>(&new_manifest)
.with_kind(ErrorKind::Serialization)?
.into(),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(
serde_json::to_vec::<Manifest>(&new_manifest)
.with_kind(ErrorKind::Serialization)?
.into(),
),
)),
)?;
let mut s9pk = S9pk::new(MerkleArchive::new(archive, signer, SIG_CONTEXT), None).await?;
let mut dest_file = File::create(destination.as_ref()).await?;
s9pk.serialize(&mut dest_file, false).await?;
dest_file.sync_all().await?;
scratch_dir.delete().await?;
Ok(S9pk::deserialize(
&MultiCursorFile::from(File::open(destination.as_ref()).await?),
None,
)
.await?)
let mut res = S9pk::new(MerkleArchive::new(archive, signer, SIG_CONTEXT), None).await?;
res.as_archive_mut().update_hashes(true).await?;
Ok(res)
}
}
@@ -272,7 +200,7 @@ impl From<ManifestV1> for Manifest {
Self {
id: value.id,
title: value.title,
version: value.version,
version: ExtendedVersion::from(value.version).into(),
release_notes: value.release_notes,
license: value.license.into(),
wrapper_repo: value.wrapper_repo,

View File

@@ -2,6 +2,7 @@ use std::collections::{BTreeMap, BTreeSet};
use std::path::Path;
use color_eyre::eyre::eyre;
use exver::Version;
use helpers::const_true;
use imbl_value::InternedString;
pub use models::PackageId;
@@ -20,8 +21,8 @@ use crate::util::serde::Regex;
use crate::util::VersionString;
use crate::version::{Current, VersionT};
fn current_version() -> VersionString {
Current::new().semver().into()
fn current_version() -> Version {
Current::new().semver()
}
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)]
@@ -59,7 +60,8 @@ pub struct Manifest {
#[ts(type = "string | null")]
pub git_hash: Option<GitHash>,
#[serde(default = "current_version")]
pub os_version: VersionString,
#[ts(type = "string")]
pub os_version: Version,
#[serde(default = "const_true")]
pub has_config: bool,
}

View File

@@ -12,10 +12,12 @@ use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::file_contents::FileContents;
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
use crate::s9pk::merkle_archive::source::{
ArchiveSource, DynFileSource, FileSource, Section, TmpSource,
};
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::v2::pack::{ImageSource, PackSource};
use crate::util::io::TmpDir;
use crate::util::io::{open_file, TmpDir};
const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x02];
@@ -165,8 +167,8 @@ impl<S: FileSource + Clone> S9pk<S> {
}
}
impl<S: From<PackSource> + FileSource + Clone> S9pk<S> {
pub async fn load_images(&mut self, tmpdir: &TmpDir) -> Result<(), Error> {
impl<S: From<TmpSource<PackSource>> + FileSource + Clone> S9pk<S> {
pub async fn load_images(&mut self, tmp_dir: Arc<TmpDir>) -> Result<(), Error> {
let id = &self.manifest.id;
let version = &self.manifest.version;
for (image_id, image_config) in &mut self.manifest.images {
@@ -175,7 +177,7 @@ impl<S: From<PackSource> + FileSource + Clone> S9pk<S> {
image_config
.source
.load(
tmpdir,
tmp_dir.clone(),
id,
version,
image_id,
@@ -206,7 +208,7 @@ impl<S: ArchiveSource + Clone> S9pk<Section<S>> {
)
.await?;
let mut magic_version = [0u8; 3];
let mut magic_version = [0u8; MAGIC_AND_VERSION.len()];
header.read_exact(&mut magic_version).await?;
ensure_code!(
&magic_version == MAGIC_AND_VERSION,
@@ -232,7 +234,7 @@ impl S9pk {
Self::deserialize(&MultiCursorFile::from(file), None).await
}
pub async fn open(path: impl AsRef<Path>, id: Option<&PackageId>) -> Result<Self, Error> {
let res = Self::from_file(tokio::fs::File::open(path).await?).await?;
let res = Self::from_file(open_file(path).await?).await?;
if let Some(id) = id {
ensure_code!(
&res.as_manifest().id == id,

View File

@@ -10,7 +10,6 @@ use futures::{FutureExt, TryStreamExt};
use imbl_value::InternedString;
use models::{ImageId, PackageId, VersionString};
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::AsyncRead;
use tokio::process::Command;
use tokio::sync::OnceCell;
@@ -23,12 +22,12 @@ use crate::rpc_continuations::Guid;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{
into_dyn_read, ArchiveSource, DynFileSource, FileSource,
into_dyn_read, ArchiveSource, DynFileSource, FileSource, TmpSource,
};
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::v2::SIG_CONTEXT;
use crate::s9pk::S9pk;
use crate::util::io::TmpDir;
use crate::util::io::{create_file, open_file, TmpDir};
use crate::util::Invoke;
#[cfg(not(feature = "docker"))]
@@ -64,7 +63,7 @@ impl SqfsDir {
.invoke(ErrorKind::Filesystem)
.await?;
Ok(MultiCursorFile::from(
File::open(&path)
open_file(&path)
.await
.with_ctx(|_| (ErrorKind::Filesystem, path.display()))?,
))
@@ -100,11 +99,7 @@ impl FileSource for PackSource {
async fn reader(&self) -> Result<Self::Reader, Error> {
match self {
Self::Buffered(a) => Ok(into_dyn_read(Cursor::new(a.clone()))),
Self::File(f) => Ok(into_dyn_read(
File::open(f)
.await
.with_ctx(|_| (ErrorKind::Filesystem, f.display()))?,
)),
Self::File(f) => Ok(into_dyn_read(open_file(f).await?)),
Self::Squashfs(dir) => dir.file().await?.fetch_all().await.map(into_dyn_read),
}
}
@@ -284,9 +279,9 @@ pub enum ImageSource {
}
impl ImageSource {
#[instrument(skip_all)]
pub fn load<'a, S: From<PackSource> + FileSource + Clone>(
pub fn load<'a, S: From<TmpSource<PackSource>> + FileSource + Clone>(
&'a self,
tmpdir: &'a TmpDir,
tmp_dir: Arc<TmpDir>,
id: &'a PackageId,
version: &'a VersionString,
image_id: &'a ImageId,
@@ -331,12 +326,13 @@ impl ImageSource {
.arg(&tag)
.arg(&docker_platform)
.arg("-o")
.arg("type=image")
.arg("type=docker,dest=-")
.capture(false)
.pipe(Command::new(CONTAINER_TOOL).arg("load"))
.invoke(ErrorKind::Docker)
.await?;
ImageSource::DockerTag(tag.clone())
.load(tmpdir, id, version, image_id, arch, into)
.load(tmp_dir, id, version, image_id, arch, into)
.await?;
Command::new(CONTAINER_TOOL)
.arg("rmi")
@@ -390,21 +386,24 @@ impl ImageSource {
into.insert_path(
base_path.with_extension("json"),
Entry::file(
PackSource::Buffered(
serde_json::to_vec(&ImageMetadata {
workdir: if config.working_dir == Path::new("") {
"/".into()
} else {
config.working_dir
},
user: if config.user.is_empty() {
"root".into()
} else {
config.user.into()
},
})
.with_kind(ErrorKind::Serialization)?
.into(),
TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(
serde_json::to_vec(&ImageMetadata {
workdir: if config.working_dir == Path::new("") {
"/".into()
} else {
config.working_dir
},
user: if config.user.is_empty() {
"root".into()
} else {
config.user.into()
},
})
.with_kind(ErrorKind::Serialization)?
.into(),
),
)
.into(),
),
@@ -412,10 +411,16 @@ impl ImageSource {
into.insert_path(
base_path.with_extension("env"),
Entry::file(
PackSource::Buffered(config.env.join("\n").into_bytes().into()).into(),
TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(config.env.join("\n").into_bytes().into()),
)
.into(),
),
)?;
let dest = tmpdir.join(Guid::new().as_ref()).with_extension("squashfs");
let dest = tmp_dir
.join(Guid::new().as_ref())
.with_extension("squashfs");
let container = String::from_utf8(
Command::new(CONTAINER_TOOL)
.arg("create")
@@ -438,7 +443,7 @@ impl ImageSource {
.await?;
into.insert_path(
base_path.with_extension("squashfs"),
Entry::file(PackSource::File(dest).into()),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(dest)).into()),
)?;
Ok(())
@@ -460,8 +465,8 @@ pub struct ImageMetadata {
#[instrument(skip_all)]
pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
let tmpdir = Arc::new(TmpDir::new().await?);
let mut files = DirectoryContents::<PackSource>::new();
let tmp_dir = Arc::new(TmpDir::new().await?);
let mut files = DirectoryContents::<TmpSource<PackSource>>::new();
let js_dir = params.javascript();
let manifest: Arc<[u8]> = Command::new("node")
.arg("-e")
@@ -474,7 +479,10 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
.into();
files.insert(
"manifest.json".into(),
Entry::file(PackSource::Buffered(manifest.clone())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(manifest.clone()),
)),
);
let icon = params.icon().await?;
let icon_ext = icon
@@ -483,22 +491,28 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
.to_string_lossy();
files.insert(
InternedString::from_display(&lazy_format!("icon.{}", icon_ext)),
Entry::file(PackSource::File(icon)),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(icon))),
);
files.insert(
"LICENSE.md".into(),
Entry::file(PackSource::File(params.license())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::File(params.license()),
)),
);
files.insert(
"instructions.md".into(),
Entry::file(PackSource::File(params.instructions())),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::File(params.instructions()),
)),
);
files.insert(
"javascript.squashfs".into(),
Entry::file(PackSource::Squashfs(Arc::new(SqfsDir::new(
js_dir,
tmpdir.clone(),
)))),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Squashfs(Arc::new(SqfsDir::new(js_dir, tmp_dir.clone()))),
)),
);
let mut s9pk = S9pk::new(
@@ -511,26 +525,29 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
for assets in s9pk.as_manifest().assets.clone() {
s9pk.as_archive_mut().contents_mut().insert_path(
Path::new("assets").join(&assets).with_extension("squashfs"),
Entry::file(PackSource::Squashfs(Arc::new(SqfsDir::new(
assets_dir.join(&assets),
tmpdir.clone(),
)))),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Squashfs(Arc::new(SqfsDir::new(
assets_dir.join(&assets),
tmp_dir.clone(),
))),
)),
)?;
}
s9pk.load_images(&*tmpdir).await?;
s9pk.load_images(tmp_dir.clone()).await?;
s9pk.validate_and_filter(None)?;
s9pk.serialize(
&mut File::create(params.output(&s9pk.as_manifest().id)).await?,
&mut create_file(params.output(&s9pk.as_manifest().id)).await?,
false,
)
.await?;
drop(s9pk);
tmpdir.gc().await?;
tmp_dir.gc().await?;
Ok(())
}