mirror of
https://github.com/Start9Labs/start-os.git
synced 2026-03-30 20:14:49 +00:00
feature: pack s9pk (#2642)
* TODO: images * wip * pack s9pk images * include path in packsource error * debug info * add cmd as context to invoke * filehelper bugfix * fix file helper * fix exposeForDependents * misc fixes * force image removal * fix filtering * fix deadlock * fix api * chore: Up the version of the package.json * always allow concurrency within same call stack * Update core/startos/src/s9pk/merkle_archive/expected.rs Co-authored-by: Jade <2364004+Blu-J@users.noreply.github.com> --------- Co-authored-by: J H <dragondef@gmail.com> Co-authored-by: Jade <2364004+Blu-J@users.noreply.github.com>
This commit is contained in:
@@ -211,7 +211,10 @@ impl<S: FileSource + Clone> DirectoryContents<S> {
|
||||
if !filter(path) {
|
||||
if v.hash.is_none() {
|
||||
return Err(Error::new(
|
||||
eyre!("cannot filter out unhashed file, run `update_hashes` first"),
|
||||
eyre!(
|
||||
"cannot filter out unhashed file {}, run `update_hashes` first",
|
||||
path.display()
|
||||
),
|
||||
ErrorKind::InvalidRequest,
|
||||
));
|
||||
}
|
||||
|
||||
103
core/startos/src/s9pk/merkle_archive/expected.rs
Normal file
103
core/startos/src/s9pk/merkle_archive/expected.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
|
||||
use std::ffi::OsStr;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::prelude::*;
|
||||
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
|
||||
use crate::s9pk::merkle_archive::source::FileSource;
|
||||
use crate::s9pk::merkle_archive::Entry;
|
||||
|
||||
/// An object for tracking the files expected to be in an s9pk
|
||||
pub struct Expected<'a, T> {
|
||||
keep: DirectoryContents<()>,
|
||||
dir: &'a DirectoryContents<T>,
|
||||
}
|
||||
impl<'a, T> Expected<'a, T> {
|
||||
pub fn new(dir: &'a DirectoryContents<T>,) -> Self {
|
||||
Self {
|
||||
keep: DirectoryContents::new(),
|
||||
dir
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<'a, T: Clone> Expected<'a, T> {
|
||||
pub fn check_file(&mut self, path: impl AsRef<Path>) -> Result<(), Error> {
|
||||
if self
|
||||
.dir
|
||||
.get_path(path.as_ref())
|
||||
.and_then(|e| e.as_file())
|
||||
.is_some()
|
||||
{
|
||||
self.keep.insert_path(path, Entry::file(()))?;
|
||||
Ok(())
|
||||
} else {
|
||||
Err(Error::new(
|
||||
eyre!("file {} missing from archive", path.as_ref().display()),
|
||||
ErrorKind::ParseS9pk,
|
||||
))
|
||||
}
|
||||
}
|
||||
pub fn check_stem(
|
||||
&mut self,
|
||||
path: impl AsRef<Path>,
|
||||
mut valid_extension: impl FnMut(Option<&OsStr>) -> bool,
|
||||
) -> Result<(), Error> {
|
||||
let (dir, stem) = if let Some(parent) = path.as_ref().parent().filter(|p| *p != Path::new("")) {
|
||||
(
|
||||
self.dir
|
||||
.get_path(parent)
|
||||
.and_then(|e| e.as_directory())
|
||||
.ok_or_else(|| {
|
||||
Error::new(
|
||||
eyre!("directory {} missing from archive", parent.display()),
|
||||
ErrorKind::ParseS9pk,
|
||||
)
|
||||
})?,
|
||||
path.as_ref().strip_prefix(parent).unwrap(),
|
||||
)
|
||||
} else {
|
||||
(self.dir, path.as_ref())
|
||||
};
|
||||
let name = dir
|
||||
.with_stem(&stem.as_os_str().to_string_lossy())
|
||||
.filter(|(_, e)| e.as_file().is_some())
|
||||
.try_fold(
|
||||
Err(Error::new(
|
||||
eyre!(
|
||||
"file {} with valid extension missing from archive",
|
||||
path.as_ref().display()
|
||||
),
|
||||
ErrorKind::ParseS9pk,
|
||||
)),
|
||||
|acc, (name, _)|
|
||||
if valid_extension(Path::new(&*name).extension()) {
|
||||
match acc {
|
||||
Ok(_) => Err(Error::new(
|
||||
eyre!(
|
||||
"more than one file matching {} with valid extension in archive",
|
||||
path.as_ref().display()
|
||||
),
|
||||
ErrorKind::ParseS9pk,
|
||||
)),
|
||||
Err(_) => Ok(Ok(name))
|
||||
}
|
||||
} else {
|
||||
Ok(acc)
|
||||
}
|
||||
)??;
|
||||
self.keep
|
||||
.insert_path(path.as_ref().with_file_name(name), Entry::file(()))?;
|
||||
Ok(())
|
||||
}
|
||||
pub fn into_filter(self) -> Filter {
|
||||
Filter(self.keep)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Filter(DirectoryContents<()>);
|
||||
impl Filter {
|
||||
pub fn keep_checked<T: FileSource + Clone>(&self, dir: &mut DirectoryContents<T>) -> Result<(), Error> {
|
||||
dir.filter(|path| self.0.get_path(path).is_some())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ use crate::util::serde::Base64;
|
||||
use crate::CAP_1_MiB;
|
||||
|
||||
pub mod directory_contents;
|
||||
pub mod expected;
|
||||
pub mod file_contents;
|
||||
pub mod hash;
|
||||
pub mod sink;
|
||||
@@ -217,6 +218,9 @@ impl<S> Entry<S> {
|
||||
pub fn file(source: S) -> Self {
|
||||
Self::new(EntryContents::File(FileContents::new(source)))
|
||||
}
|
||||
pub fn directory(directory: DirectoryContents<S>) -> Self {
|
||||
Self::new(EntryContents::Directory(directory))
|
||||
}
|
||||
pub fn hash(&self) -> Option<(Hash, u64)> {
|
||||
self.hash
|
||||
}
|
||||
|
||||
@@ -280,3 +280,8 @@ impl<S: ArchiveSource> FileSource for Section<S> {
|
||||
self.source.copy_to(self.position, self.size, w).await
|
||||
}
|
||||
}
|
||||
|
||||
pub type DynRead = Box<dyn AsyncRead + Unpin + Send + Sync + 'static>;
|
||||
pub fn into_dyn_read<R: AsyncRead + Unpin + Send + Sync + 'static>(r: R) -> DynRead {
|
||||
Box::new(r)
|
||||
}
|
||||
|
||||
@@ -97,7 +97,7 @@ impl ArchiveSource for MultiCursorFile {
|
||||
.ok()
|
||||
.map(|m| m.len())
|
||||
}
|
||||
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send, Error> {
|
||||
async fn fetch_all(&self) -> Result<impl AsyncRead + Unpin + Send + 'static, Error> {
|
||||
use tokio::io::AsyncSeekExt;
|
||||
|
||||
let mut file = self.cursor().await?;
|
||||
|
||||
@@ -1,32 +1,26 @@
|
||||
use std::collections::BTreeSet;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::Parser;
|
||||
use itertools::Itertools;
|
||||
use models::ImageId;
|
||||
use rpc_toolkit::{from_fn_async, Empty, HandlerExt, ParentHandler};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::fs::File;
|
||||
use tokio::process::Command;
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::context::CliContext;
|
||||
use crate::prelude::*;
|
||||
use crate::s9pk::manifest::Manifest;
|
||||
use crate::s9pk::merkle_archive::source::DynFileSource;
|
||||
use crate::s9pk::merkle_archive::Entry;
|
||||
use crate::s9pk::v2::compat::CONTAINER_TOOL;
|
||||
use crate::s9pk::v2::pack::ImageConfig;
|
||||
use crate::s9pk::v2::SIG_CONTEXT;
|
||||
use crate::s9pk::S9pk;
|
||||
use crate::util::io::TmpDir;
|
||||
use crate::util::serde::{apply_expr, HandlerExtSerde};
|
||||
use crate::util::Invoke;
|
||||
|
||||
pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"];
|
||||
|
||||
pub fn s9pk() -> ParentHandler<CliContext> {
|
||||
ParentHandler::new()
|
||||
.subcommand("pack", from_fn_async(super::v2::pack::pack).no_display())
|
||||
.subcommand("edit", edit())
|
||||
.subcommand("inspect", inspect())
|
||||
}
|
||||
@@ -77,117 +71,21 @@ fn inspect() -> ParentHandler<CliContext, S9pkPath> {
|
||||
#[derive(Deserialize, Serialize, Parser, TS)]
|
||||
struct AddImageParams {
|
||||
id: ImageId,
|
||||
image: String,
|
||||
arches: Option<Vec<String>>,
|
||||
#[command(flatten)]
|
||||
config: ImageConfig,
|
||||
}
|
||||
async fn add_image(
|
||||
ctx: CliContext,
|
||||
AddImageParams { id, image, arches }: AddImageParams,
|
||||
AddImageParams { id, config }: AddImageParams,
|
||||
S9pkPath { s9pk: s9pk_path }: S9pkPath,
|
||||
) -> Result<(), Error> {
|
||||
let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?, false)
|
||||
let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?)
|
||||
.await?
|
||||
.into_dyn();
|
||||
let arches: BTreeSet<_> = arches
|
||||
.unwrap_or_else(|| vec!["x86_64".to_owned(), "aarch64".to_owned()])
|
||||
.into_iter()
|
||||
.collect();
|
||||
s9pk.as_manifest_mut().images.insert(id, config);
|
||||
let tmpdir = TmpDir::new().await?;
|
||||
for arch in arches {
|
||||
let sqfs_path = tmpdir.join(format!("image.{arch}.squashfs"));
|
||||
let docker_platform = if arch == "x86_64" {
|
||||
"--platform=linux/amd64".to_owned()
|
||||
} else if arch == "aarch64" {
|
||||
"--platform=linux/arm64".to_owned()
|
||||
} else {
|
||||
format!("--platform=linux/{arch}")
|
||||
};
|
||||
let env = String::from_utf8(
|
||||
Command::new(CONTAINER_TOOL)
|
||||
.arg("run")
|
||||
.arg("--rm")
|
||||
.arg(&docker_platform)
|
||||
.arg("--entrypoint")
|
||||
.arg("env")
|
||||
.arg(&image)
|
||||
.invoke(ErrorKind::Docker)
|
||||
.await?,
|
||||
)?
|
||||
.lines()
|
||||
.filter(|l| {
|
||||
l.trim()
|
||||
.split_once("=")
|
||||
.map_or(false, |(v, _)| !SKIP_ENV.contains(&v))
|
||||
})
|
||||
.join("\n")
|
||||
+ "\n";
|
||||
let workdir = Path::new(
|
||||
String::from_utf8(
|
||||
Command::new(CONTAINER_TOOL)
|
||||
.arg("run")
|
||||
.arg(&docker_platform)
|
||||
.arg("--rm")
|
||||
.arg("--entrypoint")
|
||||
.arg("pwd")
|
||||
.arg(&image)
|
||||
.invoke(ErrorKind::Docker)
|
||||
.await?,
|
||||
)?
|
||||
.trim(),
|
||||
)
|
||||
.to_owned();
|
||||
let container_id = String::from_utf8(
|
||||
Command::new(CONTAINER_TOOL)
|
||||
.arg("create")
|
||||
.arg(&docker_platform)
|
||||
.arg(&image)
|
||||
.invoke(ErrorKind::Docker)
|
||||
.await?,
|
||||
)?;
|
||||
Command::new("bash")
|
||||
.arg("-c")
|
||||
.arg(format!(
|
||||
"{CONTAINER_TOOL} export {container_id} | mksquashfs - {sqfs} -tar",
|
||||
container_id = container_id.trim(),
|
||||
sqfs = sqfs_path.display()
|
||||
))
|
||||
.invoke(ErrorKind::Docker)
|
||||
.await?;
|
||||
Command::new(CONTAINER_TOOL)
|
||||
.arg("rm")
|
||||
.arg(container_id.trim())
|
||||
.invoke(ErrorKind::Docker)
|
||||
.await?;
|
||||
let archive = s9pk.as_archive_mut();
|
||||
archive.set_signer(ctx.developer_key()?.clone(), SIG_CONTEXT);
|
||||
archive.contents_mut().insert_path(
|
||||
Path::new("images")
|
||||
.join(&arch)
|
||||
.join(&id)
|
||||
.with_extension("squashfs"),
|
||||
Entry::file(DynFileSource::new(sqfs_path)),
|
||||
)?;
|
||||
archive.contents_mut().insert_path(
|
||||
Path::new("images")
|
||||
.join(&arch)
|
||||
.join(&id)
|
||||
.with_extension("env"),
|
||||
Entry::file(DynFileSource::new(Arc::<[u8]>::from(Vec::from(env)))),
|
||||
)?;
|
||||
archive.contents_mut().insert_path(
|
||||
Path::new("images")
|
||||
.join(&arch)
|
||||
.join(&id)
|
||||
.with_extension("json"),
|
||||
Entry::file(DynFileSource::new(Arc::<[u8]>::from(
|
||||
serde_json::to_vec(&serde_json::json!({
|
||||
"workdir": workdir
|
||||
}))
|
||||
.with_kind(ErrorKind::Serialization)?,
|
||||
))),
|
||||
)?;
|
||||
}
|
||||
s9pk.as_manifest_mut().images.insert(id);
|
||||
s9pk.load_images(&tmpdir).await?;
|
||||
s9pk.validate_and_filter(None)?;
|
||||
let tmp_path = s9pk_path.with_extension("s9pk.tmp");
|
||||
let mut tmp_file = File::create(&tmp_path).await?;
|
||||
s9pk.serialize(&mut tmp_file, true).await?;
|
||||
@@ -206,7 +104,7 @@ async fn edit_manifest(
|
||||
EditManifestParams { expression }: EditManifestParams,
|
||||
S9pkPath { s9pk: s9pk_path }: S9pkPath,
|
||||
) -> Result<Manifest, Error> {
|
||||
let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?, false).await?;
|
||||
let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?).await?;
|
||||
let old = serde_json::to_value(s9pk.as_manifest()).with_kind(ErrorKind::Serialization)?;
|
||||
*s9pk.as_manifest_mut() = serde_json::from_value(apply_expr(old.into(), &expression)?.into())
|
||||
.with_kind(ErrorKind::Serialization)?;
|
||||
@@ -227,7 +125,7 @@ async fn file_tree(
|
||||
_: Empty,
|
||||
S9pkPath { s9pk }: S9pkPath,
|
||||
) -> Result<Vec<PathBuf>, Error> {
|
||||
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?, false).await?;
|
||||
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
|
||||
Ok(s9pk.as_archive().contents().file_paths(""))
|
||||
}
|
||||
|
||||
@@ -244,7 +142,7 @@ async fn cat(
|
||||
) -> Result<(), Error> {
|
||||
use crate::s9pk::merkle_archive::source::FileSource;
|
||||
|
||||
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?, false).await?;
|
||||
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
|
||||
tokio::io::copy(
|
||||
&mut s9pk
|
||||
.as_archive()
|
||||
@@ -266,6 +164,6 @@ async fn inspect_manifest(
|
||||
_: Empty,
|
||||
S9pkPath { s9pk }: S9pkPath,
|
||||
) -> Result<Manifest, Error> {
|
||||
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?, false).await?;
|
||||
let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?;
|
||||
Ok(s9pk.as_manifest().clone())
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::io::Cursor;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use itertools::Itertools;
|
||||
@@ -14,49 +13,18 @@ use crate::prelude::*;
|
||||
use crate::s9pk::manifest::Manifest;
|
||||
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
|
||||
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
|
||||
use crate::s9pk::merkle_archive::source::{FileSource, Section};
|
||||
use crate::s9pk::merkle_archive::source::Section;
|
||||
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
|
||||
use crate::s9pk::rpc::SKIP_ENV;
|
||||
use crate::s9pk::v1::manifest::{Manifest as ManifestV1, PackageProcedure};
|
||||
use crate::s9pk::v1::reader::S9pkReader;
|
||||
use crate::s9pk::v2::pack::{PackSource, CONTAINER_TOOL};
|
||||
use crate::s9pk::v2::{S9pk, SIG_CONTEXT};
|
||||
use crate::util::io::TmpDir;
|
||||
use crate::util::Invoke;
|
||||
|
||||
pub const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x01];
|
||||
|
||||
#[cfg(not(feature = "docker"))]
|
||||
pub const CONTAINER_TOOL: &str = "podman";
|
||||
|
||||
#[cfg(feature = "docker")]
|
||||
pub const CONTAINER_TOOL: &str = "docker";
|
||||
|
||||
type DynRead = Box<dyn AsyncRead + Unpin + Send + Sync + 'static>;
|
||||
fn into_dyn_read<R: AsyncRead + Unpin + Send + Sync + 'static>(r: R) -> DynRead {
|
||||
Box::new(r)
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum CompatSource {
|
||||
Buffered(Arc<[u8]>),
|
||||
File(PathBuf),
|
||||
}
|
||||
impl FileSource for CompatSource {
|
||||
type Reader = Box<dyn AsyncRead + Unpin + Send + Sync + 'static>;
|
||||
async fn size(&self) -> Result<u64, Error> {
|
||||
match self {
|
||||
Self::Buffered(a) => Ok(a.len() as u64),
|
||||
Self::File(f) => Ok(tokio::fs::metadata(f).await?.len()),
|
||||
}
|
||||
}
|
||||
async fn reader(&self) -> Result<Self::Reader, Error> {
|
||||
match self {
|
||||
Self::Buffered(a) => Ok(into_dyn_read(Cursor::new(a.clone()))),
|
||||
Self::File(f) => Ok(into_dyn_read(File::open(f).await?)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl S9pk<Section<MultiCursorFile>> {
|
||||
#[instrument(skip_all)]
|
||||
pub async fn from_v1<R: AsyncRead + AsyncSeek + Unpin + Send + Sync>(
|
||||
@@ -66,7 +34,7 @@ impl S9pk<Section<MultiCursorFile>> {
|
||||
) -> Result<Self, Error> {
|
||||
let scratch_dir = TmpDir::new().await?;
|
||||
|
||||
let mut archive = DirectoryContents::<CompatSource>::new();
|
||||
let mut archive = DirectoryContents::<PackSource>::new();
|
||||
|
||||
// manifest.json
|
||||
let manifest_raw = reader.manifest().await?;
|
||||
@@ -88,21 +56,21 @@ impl S9pk<Section<MultiCursorFile>> {
|
||||
let license: Arc<[u8]> = reader.license().await?.to_vec().await?.into();
|
||||
archive.insert_path(
|
||||
"LICENSE.md",
|
||||
Entry::file(CompatSource::Buffered(license.into())),
|
||||
Entry::file(PackSource::Buffered(license.into())),
|
||||
)?;
|
||||
|
||||
// instructions.md
|
||||
let instructions: Arc<[u8]> = reader.instructions().await?.to_vec().await?.into();
|
||||
archive.insert_path(
|
||||
"instructions.md",
|
||||
Entry::file(CompatSource::Buffered(instructions.into())),
|
||||
Entry::file(PackSource::Buffered(instructions.into())),
|
||||
)?;
|
||||
|
||||
// icon.md
|
||||
let icon: Arc<[u8]> = reader.icon().await?.to_vec().await?.into();
|
||||
archive.insert_path(
|
||||
format!("icon.{}", manifest.assets.icon_type()),
|
||||
Entry::file(CompatSource::Buffered(icon.into())),
|
||||
Entry::file(PackSource::Buffered(icon.into())),
|
||||
)?;
|
||||
|
||||
// images
|
||||
@@ -122,7 +90,9 @@ impl S9pk<Section<MultiCursorFile>> {
|
||||
.invoke(ErrorKind::Docker)
|
||||
.await?;
|
||||
for (image, system) in &images {
|
||||
new_manifest.images.insert(image.clone());
|
||||
let mut image_config = new_manifest.images.remove(image).unwrap_or_default();
|
||||
image_config.arch.insert(arch.as_str().into());
|
||||
new_manifest.images.insert(image.clone(), image_config);
|
||||
let sqfs_path = images_dir.join(image).with_extension("squashfs");
|
||||
let image_name = if *system {
|
||||
format!("start9/{}:latest", image)
|
||||
@@ -190,21 +160,21 @@ impl S9pk<Section<MultiCursorFile>> {
|
||||
.join(&arch)
|
||||
.join(&image)
|
||||
.with_extension("squashfs"),
|
||||
Entry::file(CompatSource::File(sqfs_path)),
|
||||
Entry::file(PackSource::File(sqfs_path)),
|
||||
)?;
|
||||
archive.insert_path(
|
||||
Path::new("images")
|
||||
.join(&arch)
|
||||
.join(&image)
|
||||
.with_extension("env"),
|
||||
Entry::file(CompatSource::Buffered(Vec::from(env).into())),
|
||||
Entry::file(PackSource::Buffered(Vec::from(env).into())),
|
||||
)?;
|
||||
archive.insert_path(
|
||||
Path::new("images")
|
||||
.join(&arch)
|
||||
.join(&image)
|
||||
.with_extension("json"),
|
||||
Entry::file(CompatSource::Buffered(
|
||||
Entry::file(PackSource::Buffered(
|
||||
serde_json::to_vec(&serde_json::json!({
|
||||
"workdir": workdir
|
||||
}))
|
||||
@@ -240,7 +210,7 @@ impl S9pk<Section<MultiCursorFile>> {
|
||||
.await?;
|
||||
archive.insert_path(
|
||||
Path::new("assets").join(&asset_id),
|
||||
Entry::file(CompatSource::File(sqfs_path)),
|
||||
Entry::file(PackSource::File(sqfs_path)),
|
||||
)?;
|
||||
}
|
||||
|
||||
@@ -267,12 +237,12 @@ impl S9pk<Section<MultiCursorFile>> {
|
||||
.await?;
|
||||
archive.insert_path(
|
||||
Path::new("javascript.squashfs"),
|
||||
Entry::file(CompatSource::File(sqfs_path)),
|
||||
Entry::file(PackSource::File(sqfs_path)),
|
||||
)?;
|
||||
|
||||
archive.insert_path(
|
||||
"manifest.json",
|
||||
Entry::file(CompatSource::Buffered(
|
||||
Entry::file(PackSource::Buffered(
|
||||
serde_json::to_vec::<Manifest>(&new_manifest)
|
||||
.with_kind(ErrorKind::Serialization)?
|
||||
.into(),
|
||||
@@ -289,7 +259,6 @@ impl S9pk<Section<MultiCursorFile>> {
|
||||
Ok(S9pk::deserialize(
|
||||
&MultiCursorFile::from(File::open(destination.as_ref()).await?),
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.await?)
|
||||
}
|
||||
@@ -310,7 +279,7 @@ impl From<ManifestV1> for Manifest {
|
||||
marketing_site: value.marketing_site.unwrap_or_else(|| default_url.clone()),
|
||||
donation_url: value.donation_url,
|
||||
description: value.description,
|
||||
images: BTreeSet::new(),
|
||||
images: BTreeMap::new(),
|
||||
assets: value
|
||||
.volumes
|
||||
.iter()
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::path::Path;
|
||||
|
||||
use color_eyre::eyre::eyre;
|
||||
use helpers::const_true;
|
||||
use imbl_value::InternedString;
|
||||
pub use models::PackageId;
|
||||
use models::{ImageId, VolumeId};
|
||||
use models::{mime, ImageId, VolumeId};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
use url::Url;
|
||||
@@ -12,6 +13,9 @@ use url::Url;
|
||||
use crate::dependencies::Dependencies;
|
||||
use crate::prelude::*;
|
||||
use crate::s9pk::git_hash::GitHash;
|
||||
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
|
||||
use crate::s9pk::merkle_archive::expected::{Expected, Filter};
|
||||
use crate::s9pk::v2::pack::ImageConfig;
|
||||
use crate::util::serde::Regex;
|
||||
use crate::util::VersionString;
|
||||
use crate::version::{Current, VersionT};
|
||||
@@ -42,7 +46,7 @@ pub struct Manifest {
|
||||
#[ts(type = "string | null")]
|
||||
pub donation_url: Option<Url>,
|
||||
pub description: Description,
|
||||
pub images: BTreeSet<ImageId>,
|
||||
pub images: BTreeMap<ImageId, ImageConfig>,
|
||||
pub assets: BTreeSet<VolumeId>, // TODO: AssetsId
|
||||
pub volumes: BTreeSet<VolumeId>,
|
||||
#[serde(default)]
|
||||
@@ -59,6 +63,83 @@ pub struct Manifest {
|
||||
#[serde(default = "const_true")]
|
||||
pub has_config: bool,
|
||||
}
|
||||
impl Manifest {
|
||||
pub fn validate_for<'a, T: Clone>(
|
||||
&self,
|
||||
arch: Option<&str>,
|
||||
archive: &'a DirectoryContents<T>,
|
||||
) -> Result<Filter, Error> {
|
||||
let mut expected = Expected::new(archive);
|
||||
expected.check_file("manifest.json")?;
|
||||
expected.check_stem("icon", |ext| {
|
||||
ext.and_then(|e| e.to_str())
|
||||
.and_then(mime)
|
||||
.map_or(false, |mime| mime.starts_with("image/"))
|
||||
})?;
|
||||
expected.check_file("LICENSE.md")?;
|
||||
expected.check_file("instructions.md")?;
|
||||
expected.check_file("javascript.squashfs")?;
|
||||
for assets in &self.assets {
|
||||
expected.check_file(Path::new("assets").join(assets).with_extension("squashfs"))?;
|
||||
}
|
||||
for (image_id, config) in &self.images {
|
||||
let mut check_arch = |arch: &str| {
|
||||
let mut arch = arch;
|
||||
if let Err(e) = expected.check_file(
|
||||
Path::new("images")
|
||||
.join(arch)
|
||||
.join(image_id)
|
||||
.with_extension("squashfs"),
|
||||
) {
|
||||
if let Some(emulate_as) = &config.emulate_missing_as {
|
||||
expected.check_file(
|
||||
Path::new("images")
|
||||
.join(arch)
|
||||
.join(image_id)
|
||||
.with_extension("squashfs"),
|
||||
)?;
|
||||
arch = &**emulate_as;
|
||||
} else {
|
||||
return Err(e);
|
||||
}
|
||||
}
|
||||
expected.check_file(
|
||||
Path::new("images")
|
||||
.join(arch)
|
||||
.join(image_id)
|
||||
.with_extension("json"),
|
||||
)?;
|
||||
expected.check_file(
|
||||
Path::new("images")
|
||||
.join(arch)
|
||||
.join(image_id)
|
||||
.with_extension("env"),
|
||||
)?;
|
||||
Ok(())
|
||||
};
|
||||
if let Some(arch) = arch {
|
||||
check_arch(arch)?;
|
||||
} else if let Some(arches) = &self.hardware_requirements.arch {
|
||||
for arch in arches {
|
||||
check_arch(arch)?;
|
||||
}
|
||||
} else if let Some(arch) = config.emulate_missing_as.as_deref() {
|
||||
if !config.arch.contains(arch) {
|
||||
return Err(Error::new(
|
||||
eyre!("`emulateMissingAs` must match an included `arch`"),
|
||||
ErrorKind::ParseS9pk,
|
||||
));
|
||||
}
|
||||
for arch in &config.arch {
|
||||
check_arch(&arch)?;
|
||||
}
|
||||
} else {
|
||||
return Err(Error::new(eyre!("`emulateMissingAs` required for all images if no `arch` specified in `hardwareRequirements`"), ErrorKind::ParseS9pk));
|
||||
}
|
||||
}
|
||||
Ok(expected.into_filter())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
|
||||
@@ -14,7 +14,8 @@ use crate::s9pk::merkle_archive::sink::Sink;
|
||||
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
|
||||
use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section};
|
||||
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
|
||||
use crate::ARCH;
|
||||
use crate::s9pk::v2::pack::{ImageSource, PackSource};
|
||||
use crate::util::io::TmpDir;
|
||||
|
||||
const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x02];
|
||||
|
||||
@@ -22,6 +23,7 @@ pub const SIG_CONTEXT: &str = "s9pk";
|
||||
|
||||
pub mod compat;
|
||||
pub mod manifest;
|
||||
pub mod pack;
|
||||
|
||||
/**
|
||||
/
|
||||
@@ -34,10 +36,14 @@ pub mod manifest;
|
||||
│ └── <id>.squashfs (xN)
|
||||
└── images
|
||||
└── <arch>
|
||||
├── <id>.json (xN)
|
||||
├── <id>.env (xN)
|
||||
└── <id>.squashfs (xN)
|
||||
*/
|
||||
|
||||
// this sorts the s9pk to optimize such that the parts that are used first appear earlier in the s9pk
|
||||
// this is useful for manipulating an s9pk while partially downloaded on a source that does not support
|
||||
// random access
|
||||
fn priority(s: &str) -> Option<usize> {
|
||||
match s {
|
||||
"manifest.json" => Some(0),
|
||||
@@ -51,26 +57,6 @@ fn priority(s: &str) -> Option<usize> {
|
||||
}
|
||||
}
|
||||
|
||||
fn filter(p: &Path) -> bool {
|
||||
match p.iter().count() {
|
||||
1 if p.file_name() == Some(OsStr::new("manifest.json")) => true,
|
||||
1 if p.file_stem() == Some(OsStr::new("icon")) => true,
|
||||
1 if p.file_name() == Some(OsStr::new("LICENSE.md")) => true,
|
||||
1 if p.file_name() == Some(OsStr::new("instructions.md")) => true,
|
||||
1 if p.file_name() == Some(OsStr::new("javascript.squashfs")) => true,
|
||||
1 if p.file_name() == Some(OsStr::new("assets")) => true,
|
||||
1 if p.file_name() == Some(OsStr::new("images")) => true,
|
||||
2 if p.parent() == Some(Path::new("assets")) => {
|
||||
p.extension().map_or(false, |ext| ext == "squashfs")
|
||||
}
|
||||
2 if p.parent() == Some(Path::new("images")) => p.file_name() == Some(OsStr::new(&*ARCH)),
|
||||
3 if p.parent() == Some(&*Path::new("images").join(&*ARCH)) => p
|
||||
.extension()
|
||||
.map_or(false, |ext| ext == "squashfs" || ext == "env"),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct S9pk<S = Section<MultiCursorFile>> {
|
||||
pub manifest: Manifest,
|
||||
@@ -108,6 +94,11 @@ impl<S: FileSource + Clone> S9pk<S> {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn validate_and_filter(&mut self, arch: Option<&str>) -> Result<(), Error> {
|
||||
let filter = self.manifest.validate_for(arch, self.archive.contents())?;
|
||||
filter.keep_checked(self.archive.contents_mut())
|
||||
}
|
||||
|
||||
pub async fn icon(&self) -> Result<(InternedString, FileContents<S>), Error> {
|
||||
let mut best_icon = None;
|
||||
for (path, icon) in self
|
||||
@@ -174,12 +165,37 @@ impl<S: FileSource + Clone> S9pk<S> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: From<PackSource> + FileSource + Clone> S9pk<S> {
|
||||
pub async fn load_images(&mut self, tmpdir: &TmpDir) -> Result<(), Error> {
|
||||
let id = &self.manifest.id;
|
||||
let version = &self.manifest.version;
|
||||
for (image_id, image_config) in &mut self.manifest.images {
|
||||
self.manifest_dirty = true;
|
||||
for arch in &image_config.arch {
|
||||
image_config
|
||||
.source
|
||||
.load(
|
||||
tmpdir,
|
||||
id,
|
||||
version,
|
||||
image_id,
|
||||
arch,
|
||||
self.archive.contents_mut(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
image_config.source = ImageSource::Packed;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: ArchiveSource + Clone> S9pk<Section<S>> {
|
||||
#[instrument(skip_all)]
|
||||
pub async fn deserialize(
|
||||
source: &S,
|
||||
commitment: Option<&MerkleArchiveCommitment>,
|
||||
apply_filter: bool,
|
||||
) -> Result<Self, Error> {
|
||||
use tokio::io::AsyncReadExt;
|
||||
|
||||
@@ -201,10 +217,6 @@ impl<S: ArchiveSource + Clone> S9pk<Section<S>> {
|
||||
let mut archive =
|
||||
MerkleArchive::deserialize(source, SIG_CONTEXT, &mut header, commitment).await?;
|
||||
|
||||
if apply_filter {
|
||||
archive.filter(filter)?;
|
||||
}
|
||||
|
||||
archive.sort_by(|a, b| match (priority(a), priority(b)) {
|
||||
(Some(a), Some(b)) => a.cmp(&b),
|
||||
(Some(_), None) => std::cmp::Ordering::Less,
|
||||
@@ -216,15 +228,11 @@ impl<S: ArchiveSource + Clone> S9pk<Section<S>> {
|
||||
}
|
||||
}
|
||||
impl S9pk {
|
||||
pub async fn from_file(file: File, apply_filter: bool) -> Result<Self, Error> {
|
||||
Self::deserialize(&MultiCursorFile::from(file), None, apply_filter).await
|
||||
pub async fn from_file(file: File) -> Result<Self, Error> {
|
||||
Self::deserialize(&MultiCursorFile::from(file), None).await
|
||||
}
|
||||
pub async fn open(
|
||||
path: impl AsRef<Path>,
|
||||
id: Option<&PackageId>,
|
||||
apply_filter: bool,
|
||||
) -> Result<Self, Error> {
|
||||
let res = Self::from_file(tokio::fs::File::open(path).await?, apply_filter).await?;
|
||||
pub async fn open(path: impl AsRef<Path>, id: Option<&PackageId>) -> Result<Self, Error> {
|
||||
let res = Self::from_file(tokio::fs::File::open(path).await?).await?;
|
||||
if let Some(id) = id {
|
||||
ensure_code!(
|
||||
&res.as_manifest().id == id,
|
||||
|
||||
536
core/startos/src/s9pk/v2/pack.rs
Normal file
536
core/startos/src/s9pk/v2/pack.rs
Normal file
@@ -0,0 +1,536 @@
|
||||
use std::collections::BTreeSet;
|
||||
use std::ffi::OsStr;
|
||||
use std::io::Cursor;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use clap::Parser;
|
||||
use futures::future::{ready, BoxFuture};
|
||||
use futures::{FutureExt, TryStreamExt};
|
||||
use imbl_value::InternedString;
|
||||
use models::{ImageId, PackageId, VersionString};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::fs::File;
|
||||
use tokio::io::AsyncRead;
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::OnceCell;
|
||||
use tokio_stream::wrappers::ReadDirStream;
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::context::CliContext;
|
||||
use crate::prelude::*;
|
||||
use crate::rpc_continuations::Guid;
|
||||
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
|
||||
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
|
||||
use crate::s9pk::merkle_archive::source::{
|
||||
into_dyn_read, ArchiveSource, DynFileSource, FileSource,
|
||||
};
|
||||
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
|
||||
use crate::s9pk::v2::SIG_CONTEXT;
|
||||
use crate::s9pk::S9pk;
|
||||
use crate::util::io::TmpDir;
|
||||
use crate::util::Invoke;
|
||||
|
||||
#[cfg(not(feature = "docker"))]
|
||||
pub const CONTAINER_TOOL: &str = "podman";
|
||||
|
||||
#[cfg(feature = "docker")]
|
||||
pub const CONTAINER_TOOL: &str = "docker";
|
||||
|
||||
pub struct SqfsDir {
|
||||
path: PathBuf,
|
||||
tmpdir: Arc<TmpDir>,
|
||||
sqfs: OnceCell<MultiCursorFile>,
|
||||
}
|
||||
impl SqfsDir {
|
||||
pub fn new(path: PathBuf, tmpdir: Arc<TmpDir>) -> Self {
|
||||
Self {
|
||||
path,
|
||||
tmpdir,
|
||||
sqfs: OnceCell::new(),
|
||||
}
|
||||
}
|
||||
async fn file(&self) -> Result<&MultiCursorFile, Error> {
|
||||
self.sqfs
|
||||
.get_or_try_init(|| async move {
|
||||
let guid = Guid::new();
|
||||
let path = self.tmpdir.join(guid.as_ref()).with_extension("squashfs");
|
||||
let mut cmd = Command::new("mksquashfs");
|
||||
if self.path.extension().and_then(|s| s.to_str()) == Some("tar") {
|
||||
cmd.arg("-tar");
|
||||
}
|
||||
cmd.arg(&self.path)
|
||||
.arg(&path)
|
||||
.invoke(ErrorKind::Filesystem)
|
||||
.await?;
|
||||
Ok(MultiCursorFile::from(
|
||||
File::open(&path)
|
||||
.await
|
||||
.with_ctx(|_| (ErrorKind::Filesystem, path.display()))?,
|
||||
))
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum PackSource {
|
||||
Buffered(Arc<[u8]>),
|
||||
File(PathBuf),
|
||||
Squashfs(Arc<SqfsDir>),
|
||||
}
|
||||
impl FileSource for PackSource {
|
||||
type Reader = Box<dyn AsyncRead + Unpin + Send + Sync + 'static>;
|
||||
async fn size(&self) -> Result<u64, Error> {
|
||||
match self {
|
||||
Self::Buffered(a) => Ok(a.len() as u64),
|
||||
Self::File(f) => Ok(tokio::fs::metadata(f)
|
||||
.await
|
||||
.with_ctx(|_| (ErrorKind::Filesystem, f.display()))?
|
||||
.len()),
|
||||
Self::Squashfs(dir) => dir
|
||||
.file()
|
||||
.await
|
||||
.with_ctx(|_| (ErrorKind::Filesystem, dir.path.display()))?
|
||||
.size()
|
||||
.await
|
||||
.or_not_found("file metadata"),
|
||||
}
|
||||
}
|
||||
async fn reader(&self) -> Result<Self::Reader, Error> {
|
||||
match self {
|
||||
Self::Buffered(a) => Ok(into_dyn_read(Cursor::new(a.clone()))),
|
||||
Self::File(f) => Ok(into_dyn_read(
|
||||
File::open(f)
|
||||
.await
|
||||
.with_ctx(|_| (ErrorKind::Filesystem, f.display()))?,
|
||||
)),
|
||||
Self::Squashfs(dir) => dir.file().await?.fetch_all().await.map(into_dyn_read),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl From<PackSource> for DynFileSource {
    /// Type-erases a `PackSource` into a dynamic file source.
    fn from(value: PackSource) -> Self {
        DynFileSource::new(value)
    }
}
|
||||
|
||||
// CLI arguments for `pack`: where to find the package's inputs and where to
// write the resulting `.s9pk`. All paths are optional; defaults are resolved
// relative to `path` by the accessor methods on `PackParams`.
//
// NOTE: plain `//` comments are used on purpose — `///` doc comments on clap
// derive fields become `--help` text and would change CLI output.
#[derive(Deserialize, Serialize, Parser)]
pub struct PackParams {
    // Project directory to pack (defaults to `.`).
    pub path: Option<PathBuf>,
    // Output `.s9pk` path (defaults to `<path>/<id>.s9pk`).
    #[arg(short = 'o', long = "output")]
    pub output: Option<PathBuf>,
    // Directory with the compiled javascript (defaults to `<path>/javascript`).
    #[arg(long = "javascript")]
    pub javascript: Option<PathBuf>,
    // Icon file (defaults to the unique `icon.*` file in `<path>`).
    #[arg(long = "icon")]
    pub icon: Option<PathBuf>,
    // License file (defaults to `<path>/LICENSE.md`).
    #[arg(long = "license")]
    pub license: Option<PathBuf>,
    // Instructions file (defaults to `<path>/instructions.md`).
    #[arg(long = "instructions")]
    pub instructions: Option<PathBuf>,
    // Assets directory (defaults to `<path>/assets`).
    #[arg(long = "assets")]
    pub assets: Option<PathBuf>,
}
|
||||
impl PackParams {
|
||||
fn path(&self) -> &Path {
|
||||
self.path.as_deref().unwrap_or(Path::new("."))
|
||||
}
|
||||
fn output(&self, id: &PackageId) -> PathBuf {
|
||||
self.output
|
||||
.as_ref()
|
||||
.cloned()
|
||||
.unwrap_or_else(|| self.path().join(id).with_extension("s9pk"))
|
||||
}
|
||||
fn javascript(&self) -> PathBuf {
|
||||
self.javascript
|
||||
.as_ref()
|
||||
.cloned()
|
||||
.unwrap_or_else(|| self.path().join("javascript"))
|
||||
}
|
||||
async fn icon(&self) -> Result<PathBuf, Error> {
|
||||
if let Some(icon) = &self.icon {
|
||||
Ok(icon.clone())
|
||||
} else {
|
||||
ReadDirStream::new(tokio::fs::read_dir(self.path()).await?).try_filter(|x| ready(x.path().file_stem() == Some(OsStr::new("icon")))).map_err(Error::from).try_fold(Err(Error::new(eyre!("icon not found"), ErrorKind::NotFound)), |acc, x| async move { match acc {
|
||||
Ok(_) => Err(Error::new(eyre!("multiple icons found in working directory, please specify which to use with `--icon`"), ErrorKind::InvalidRequest)),
|
||||
Err(e) => Ok({
|
||||
let path = x.path();
|
||||
if path.file_stem().and_then(|s| s.to_str()) == Some("icon") {
|
||||
Ok(path)
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
})
|
||||
}}).await?
|
||||
}
|
||||
}
|
||||
fn license(&self) -> PathBuf {
|
||||
self.license
|
||||
.as_ref()
|
||||
.cloned()
|
||||
.unwrap_or_else(|| self.path().join("LICENSE.md"))
|
||||
}
|
||||
fn instructions(&self) -> PathBuf {
|
||||
self.instructions
|
||||
.as_ref()
|
||||
.cloned()
|
||||
.unwrap_or_else(|| self.path().join("instructions.md"))
|
||||
}
|
||||
fn assets(&self) -> PathBuf {
|
||||
self.assets
|
||||
.as_ref()
|
||||
.cloned()
|
||||
.unwrap_or_else(|| self.path().join("assets"))
|
||||
}
|
||||
}
|
||||
|
||||
// Per-image configuration declared by a package: where the image comes from,
// which architectures it ships for, and an optional emulation fallback.
//
// NOTE: `//` comments are used on purpose — `///` doc comments would be
// copied into the ts-rs-generated TypeScript bindings.
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ImageConfig {
    // How the image is obtained at pack time (packed / built / pulled by tag).
    pub source: ImageSource,
    // Architectures this image is provided for.
    #[ts(type = "string[]")]
    pub arch: BTreeSet<InternedString>,
    // If set, arches missing from `arch` are emulated using this arch's image.
    #[ts(type = "string | null")]
    pub emulate_missing_as: Option<InternedString>,
}
|
||||
impl Default for ImageConfig {
    /// Default image config: image is packed into the s9pk, no architectures
    /// declared, and no emulation fallback.
    fn default() -> Self {
        Self {
            source: ImageSource::Packed,
            arch: BTreeSet::new(),
            emulate_missing_as: None,
        }
    }
}
|
||||
|
||||
#[derive(Parser)]
|
||||
struct CliImageConfig {
|
||||
#[arg(long, conflicts_with("docker-tag"))]
|
||||
docker_build: bool,
|
||||
#[arg(long, requires("docker-build"))]
|
||||
dockerfile: Option<PathBuf>,
|
||||
#[arg(long, requires("docker-build"))]
|
||||
workdir: Option<PathBuf>,
|
||||
#[arg(long, conflicts_with_all(["dockerfile", "workdir"]))]
|
||||
docker_tag: Option<String>,
|
||||
#[arg(long)]
|
||||
arch: Vec<InternedString>,
|
||||
#[arg(long)]
|
||||
emulate_missing_as: Option<InternedString>,
|
||||
}
|
||||
impl TryFrom<CliImageConfig> for ImageConfig {
|
||||
type Error = clap::Error;
|
||||
fn try_from(value: CliImageConfig) -> Result<Self, Self::Error> {
|
||||
let res = Self {
|
||||
source: if value.docker_build {
|
||||
ImageSource::DockerBuild {
|
||||
dockerfile: value.dockerfile,
|
||||
workdir: value.workdir,
|
||||
}
|
||||
} else if let Some(tag) = value.docker_tag {
|
||||
ImageSource::DockerTag(tag)
|
||||
} else {
|
||||
ImageSource::Packed
|
||||
},
|
||||
arch: value.arch.into_iter().collect(),
|
||||
emulate_missing_as: value.emulate_missing_as,
|
||||
};
|
||||
res.emulate_missing_as
|
||||
.as_ref()
|
||||
.map(|a| {
|
||||
if !res.arch.contains(a) {
|
||||
Err(clap::Error::raw(
|
||||
clap::error::ErrorKind::InvalidValue,
|
||||
"`emulate-missing-as` must match one of the provided `arch`es",
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.transpose()?;
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
// Register `ImageConfig`'s CLI arguments by delegating to the flat
// `CliImageConfig` mirror, so `ImageConfig` can be embedded in derived parsers.
impl clap::Args for ImageConfig {
    fn augment_args(cmd: clap::Command) -> clap::Command {
        CliImageConfig::augment_args(cmd)
    }
    fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
        CliImageConfig::augment_args_for_update(cmd)
    }
}
|
||||
// Parse matched CLI args through the `CliImageConfig` mirror, then validate /
// convert via `TryFrom`.
impl clap::FromArgMatches for ImageConfig {
    fn from_arg_matches(matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
        Self::try_from(CliImageConfig::from_arg_matches(matches)?)
    }
    // NOTE: update re-parses from scratch rather than merging into `self`.
    fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> {
        *self = Self::try_from(CliImageConfig::from_arg_matches(matches)?)?;
        Ok(())
    }
}
|
||||
|
||||
// How a container image referenced by the manifest is obtained at pack time.
//
// NOTE: `//` comments are used on purpose — `///` doc comments would be
// copied into the ts-rs-generated TypeScript bindings.
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum ImageSource {
    // Image files are already part of the s9pk; nothing to do at pack time.
    Packed,
    // Build from a Dockerfile (paths resolved relative to the project dir).
    #[serde(rename_all = "camelCase")]
    DockerBuild {
        workdir: Option<PathBuf>,
        dockerfile: Option<PathBuf>,
    },
    // Use an existing docker image tag, pulling it if not present locally.
    DockerTag(String),
}
|
||||
impl ImageSource {
    /// Materializes this image for `arch` and inserts its artifacts into
    /// `into` under `images/<arch>/<image_id>.{json,env,squashfs}`.
    ///
    /// - `Packed`: nothing to do, the files are already in the archive.
    /// - `DockerBuild`: builds the image with the container tool, then
    ///   recurses as `DockerTag`, then force-removes the temporary tag.
    /// - `DockerTag`: inspects (pulling on a "missing image" error), exports
    ///   the container filesystem into a squashfs in `tmpdir`, and records
    ///   the image's env/workdir/user metadata.
    ///
    /// Returns a boxed future because `DockerBuild` recurses into `load`.
    #[instrument(skip_all)]
    pub fn load<'a, S: From<PackSource> + FileSource + Clone>(
        &'a self,
        tmpdir: &'a TmpDir,
        id: &'a PackageId,
        version: &'a VersionString,
        image_id: &'a ImageId,
        arch: &'a str,
        into: &'a mut DirectoryContents<S>,
    ) -> BoxFuture<'a, Result<(), Error>> {
        // Subset of `docker image inspect`'s `.Config` JSON that we consume.
        #[derive(Deserialize)]
        #[serde(rename_all = "PascalCase")]
        struct DockerImageConfig {
            env: Vec<String>,
            #[serde(default)]
            working_dir: PathBuf,
            #[serde(default)]
            user: String,
        }
        async move {
            match self {
                ImageSource::Packed => Ok(()),
                ImageSource::DockerBuild {
                    workdir,
                    dockerfile,
                } => {
                    let workdir = workdir.as_deref().unwrap_or(Path::new("."));
                    let dockerfile = dockerfile
                        .clone()
                        .unwrap_or_else(|| workdir.join("Dockerfile"));
                    // Map our arch names onto docker platform strings.
                    let docker_platform = if arch == "x86_64" {
                        "--platform=linux/amd64".to_owned()
                    } else if arch == "aarch64" {
                        "--platform=linux/arm64".to_owned()
                    } else {
                        format!("--platform=linux/{arch}")
                    };
                    // docker buildx build ${path} -o type=image,name=start9/${id}
                    // Temporary tag, unique per package/image/version.
                    let tag = format!("start9/{id}/{image_id}:{version}");
                    Command::new(CONTAINER_TOOL)
                        .arg("build")
                        .arg(workdir)
                        .arg("-f")
                        .arg(dockerfile)
                        .arg("-t")
                        .arg(&tag)
                        .arg(&docker_platform)
                        .arg("-o")
                        .arg("type=image")
                        .capture(false)
                        .invoke(ErrorKind::Docker)
                        .await?;
                    // Reuse the tag-based path to export/inspect the image.
                    ImageSource::DockerTag(tag.clone())
                        .load(tmpdir, id, version, image_id, arch, into)
                        .await?;
                    // Force-remove the temporary tag once exported.
                    Command::new(CONTAINER_TOOL)
                        .arg("rmi")
                        .arg("-f")
                        .arg(&tag)
                        .invoke(ErrorKind::Docker)
                        .await?;
                    Ok(())
                }
                ImageSource::DockerTag(tag) => {
                    // Same arch -> docker platform mapping as DockerBuild.
                    let docker_platform = if arch == "x86_64" {
                        "--platform=linux/amd64".to_owned()
                    } else if arch == "aarch64" {
                        "--platform=linux/arm64".to_owned()
                    } else {
                        format!("--platform=linux/{arch}")
                    };
                    let mut inspect_cmd = Command::new(CONTAINER_TOOL);
                    inspect_cmd
                        .arg("image")
                        .arg("inspect")
                        .arg("--format")
                        .arg("{{json .Config}}")
                        .arg(&tag);
                    // Inspect first; if the error message says the image is
                    // missing locally, pull it and inspect again. The message
                    // sniffed differs between docker and podman backends.
                    let inspect_res = match inspect_cmd.invoke(ErrorKind::Docker).await {
                        Ok(a) => a,
                        Err(e)
                            if {
                                let msg = e.source.to_string();
                                #[cfg(feature = "docker")]
                                let matches = msg.contains("No such image:");
                                #[cfg(not(feature = "docker"))]
                                let matches = msg.contains(": image not known");
                                matches
                            } =>
                        {
                            Command::new(CONTAINER_TOOL)
                                .arg("pull")
                                .arg(&docker_platform)
                                .arg(tag)
                                .capture(false)
                                .invoke(ErrorKind::Docker)
                                .await?;
                            inspect_cmd.invoke(ErrorKind::Docker).await?
                        }
                        Err(e) => return Err(e),
                    };
                    let config = serde_json::from_slice::<DockerImageConfig>(&inspect_res)
                        .with_kind(ErrorKind::Deserialization)?;
                    let base_path = Path::new("images").join(arch).join(image_id);
                    // <base>.json: normalized runtime metadata (workdir/user).
                    into.insert_path(
                        base_path.with_extension("json"),
                        Entry::file(
                            PackSource::Buffered(
                                serde_json::to_vec(&ImageMetadata {
                                    // Docker reports "" for an unset workdir;
                                    // normalize to "/".
                                    workdir: if config.working_dir == Path::new("") {
                                        "/".into()
                                    } else {
                                        config.working_dir
                                    },
                                    // Likewise an unset user means root.
                                    user: if config.user.is_empty() {
                                        "root".into()
                                    } else {
                                        config.user.into()
                                    },
                                })
                                .with_kind(ErrorKind::Serialization)?
                                .into(),
                            )
                            .into(),
                        ),
                    )?;
                    // <base>.env: the image's environment, one VAR=val per line.
                    into.insert_path(
                        base_path.with_extension("env"),
                        Entry::file(
                            PackSource::Buffered(config.env.join("\n").into_bytes().into()).into(),
                        ),
                    )?;
                    // Export the container filesystem: create a stopped
                    // container, pipe `export` (a tar stream) into
                    // `mksquashfs -tar`, then remove the container.
                    let dest = tmpdir.join(Guid::new().as_ref()).with_extension("squashfs");
                    let container = String::from_utf8(
                        Command::new(CONTAINER_TOOL)
                            .arg("create")
                            .arg(&docker_platform)
                            .arg(&tag)
                            .invoke(ErrorKind::Docker)
                            .await?,
                    )?;
                    Command::new(CONTAINER_TOOL)
                        .arg("export")
                        .arg(container.trim())
                        .pipe(Command::new("mksquashfs").arg("-").arg(&dest).arg("-tar"))
                        .capture(false)
                        .invoke(ErrorKind::Docker)
                        .await?;
                    Command::new(CONTAINER_TOOL)
                        .arg("rm")
                        .arg(container.trim())
                        .invoke(ErrorKind::Docker)
                        .await?;
                    // <base>.squashfs: the exported root filesystem.
                    into.insert_path(
                        base_path.with_extension("squashfs"),
                        Entry::file(PackSource::File(dest).into()),
                    )?;

                    Ok(())
                }
            }
        }
        .boxed()
    }
}
|
||||
|
||||
// Normalized runtime metadata recorded alongside each packed image
// (written to `images/<arch>/<image_id>.json` at pack time).
//
// NOTE: `//` comments are used on purpose — `///` doc comments would be
// copied into the ts-rs-generated TypeScript bindings.
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ImageMetadata {
    // Working directory the image starts in ("/" when the image left it unset).
    pub workdir: PathBuf,
    // User the image runs as ("root" when the image left it unset).
    #[ts(type = "string")]
    pub user: InternedString,
}
|
||||
|
||||
/// Builds an `.s9pk` package from the project described by `params`.
///
/// Steps: evaluate the javascript bundle's manifest via node, assemble the
/// archive (manifest, icon, license, instructions, squashed javascript and
/// asset dirs), load container images, validate, sign with the developer key,
/// and serialize to the output path.
#[instrument(skip_all)]
pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
    // Scratch space for generated squashfs images; cleaned up at the end.
    let tmpdir = Arc::new(TmpDir::new().await?);
    let mut files = DirectoryContents::<PackSource>::new();
    let js_dir = params.javascript();
    // Obtain the manifest by evaluating the javascript bundle with node and
    // capturing its JSON on stdout.
    let manifest: Arc<[u8]> = Command::new("node")
        .arg("-e")
        .arg(format!(
            "console.log(JSON.stringify(require('{}/index.js').manifest))",
            js_dir.display()
        ))
        .invoke(ErrorKind::Javascript)
        .await?
        .into();
    files.insert(
        "manifest.json".into(),
        Entry::file(PackSource::Buffered(manifest.clone())),
    );
    // The icon keeps its original extension: stored as `icon.<ext>`.
    let icon = params.icon().await?;
    let icon_ext = icon
        .extension()
        .or_not_found("icon file extension")?
        .to_string_lossy();
    files.insert(
        InternedString::from_display(&lazy_format!("icon.{}", icon_ext)),
        Entry::file(PackSource::File(icon)),
    );
    files.insert(
        "LICENSE.md".into(),
        Entry::file(PackSource::File(params.license())),
    );
    files.insert(
        "instructions.md".into(),
        Entry::file(PackSource::File(params.instructions())),
    );
    // The javascript directory is packed as a (lazily built) squashfs image.
    files.insert(
        "javascript.squashfs".into(),
        Entry::file(PackSource::Squashfs(Arc::new(SqfsDir::new(
            js_dir,
            tmpdir.clone(),
        )))),
    );

    // Sign the archive with the developer key from the CLI context.
    let mut s9pk = S9pk::new(
        MerkleArchive::new(files, ctx.developer_key()?.clone(), SIG_CONTEXT),
        None,
    )
    .await?;

    // Each asset dir declared in the manifest becomes assets/<name>.squashfs.
    let assets_dir = params.assets();
    for assets in s9pk.as_manifest().assets.clone() {
        s9pk.as_archive_mut().contents_mut().insert_path(
            Path::new("assets").join(&assets).with_extension("squashfs"),
            Entry::file(PackSource::Squashfs(Arc::new(SqfsDir::new(
                assets_dir.join(&assets),
                tmpdir.clone(),
            )))),
        )?;
    }

    // Materialize container images (build/pull/export) into the archive.
    s9pk.load_images(&*tmpdir).await?;

    s9pk.validate_and_filter(None)?;

    s9pk.serialize(
        &mut File::create(params.output(&s9pk.as_manifest().id)).await?,
        false,
    )
    .await?;

    // Release archive handles before garbage-collecting the tmpdir.
    drop(s9pk);

    tmpdir.gc().await?;

    Ok(())
}
|
||||
Reference in New Issue
Block a user