Refactor/project structure (#3085)

* refactor project structure

* environment-based default registry

* fix tests

* update build container

* use docker platform for iso build emulation

* simplify compat

* Fix docker platform spec in run-compat.sh

* handle riscv compat

* fix bug with dep error exists attr

* undo removal of sorting

* use qemu for iso stage

---------

Co-authored-by: Mariusz Kogen <k0gen@pm.me>
Co-authored-by: Matt Hill <mattnine@protonmail.com>
This commit is contained in:
Aiden McClelland
2025-12-22 13:39:38 -07:00
committed by GitHub
parent eda08d5b0f
commit 96ae532879
389 changed files with 744 additions and 4005 deletions

256
core/src/s9pk/v2/compat.rs Normal file
View File

@@ -0,0 +1,256 @@
use std::collections::{BTreeMap, BTreeSet};
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;
use exver::{ExtendedVersion, VersionRange};
use tokio::io::{AsyncRead, AsyncSeek, AsyncWriteExt};
use tokio::process::Command;
use crate::dependencies::{DepInfo, Dependencies};
use crate::prelude::*;
use crate::s9pk::manifest::{DeviceFilter, Manifest};
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::source::TmpSource;
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::v1::manifest::{Manifest as ManifestV1, PackageProcedure};
use crate::s9pk::v1::reader::S9pkReader;
use crate::s9pk::v2::pack::{CONTAINER_TOOL, ImageSource, PackSource};
use crate::s9pk::v2::{S9pk, SIG_CONTEXT};
use crate::util::Invoke;
use crate::util::io::{TmpDir, create_file};
use crate::{ImageId, VolumeId};
// s9pk magic bytes (0x3b 0x3b) followed by the container format version (0x01 = legacy v1).
pub const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x01];
// Conversion of legacy (v1) s9pk packages into the v2 merkle-archive layout.
impl S9pk<TmpSource<PackSource>> {
/// Convert a legacy v1 s9pk (read via `S9pkReader`) into a v2 `S9pk` whose
/// contents are staged under `tmp_dir`, re-signing the result with `signer`.
///
/// Side effects: shells out to the container tool (docker/podman) to load and
/// re-export each image, and to `mksquashfs` to build squashfs blobs.
#[instrument(skip_all)]
pub async fn from_v1<R: AsyncRead + AsyncSeek + Unpin + Send + Sync>(
mut reader: S9pkReader<R>,
tmp_dir: Arc<TmpDir>,
signer: ed25519_dalek::SigningKey,
) -> Result<Self, Error> {
// Register qemu binfmt handlers so images for foreign architectures can be
// loaded and inspected on this host.
Command::new(*CONTAINER_TOOL)
.arg("run")
.arg("--rm")
.arg("--privileged")
.arg("tonistiigi/binfmt")
.arg("--install")
.arg("all")
.invoke(ErrorKind::Docker)
.await?;
let mut archive = DirectoryContents::<TmpSource<PackSource>>::new();
// manifest.json
let manifest_raw = reader.manifest().await?;
let manifest = from_value::<ManifestV1>(manifest_raw.clone())?;
let mut new_manifest = Manifest::try_from(manifest.clone())?;
// collect (image id, is-system-image) pairs from every docker procedure
let images: BTreeSet<(ImageId, bool)> = manifest
.package_procedures()
.filter_map(|p| {
if let PackageProcedure::Docker(p) = p {
Some((p.image.clone(), p.system))
} else {
None
}
})
.collect();
// LICENSE.md
let license: Arc<[u8]> = reader.license().await?.to_vec().await?.into();
archive.insert_path(
"LICENSE.md",
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(license.into()),
)),
)?;
// icon.*
let icon: Arc<[u8]> = reader.icon().await?.to_vec().await?.into();
archive.insert_path(
format!("icon.{}", manifest.assets.icon_type()),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(icon.into()),
)),
)?;
// images: for each arch shipped by the v1 package, load its image tarball
// into the container runtime, re-pack each image into the archive, then
// remove the tag again to keep the host runtime clean.
for arch in reader.docker_arches().await? {
Command::new(*CONTAINER_TOOL)
.arg("load")
.input(Some(&mut reader.docker_images(&arch).await?))
.invoke(ErrorKind::Docker)
.await?;
for (image, system) in &images {
// record this arch on the image's config in the new manifest
let mut image_config = new_manifest.images.remove(image).unwrap_or_default();
image_config.arch.insert(arch.as_str().into());
new_manifest.images.insert(image.clone(), image_config);
// system images use an unversioned tag; package images are tagged
// start9/<pkg>/<image>:<version> by the v1 build
let image_name = if *system {
format!("start9/{}:latest", image)
} else {
format!("start9/{}/{}:{}", manifest.id, image, manifest.version)
};
ImageSource::DockerTag(image_name.clone())
.load(
tmp_dir.clone(),
&new_manifest.id,
&new_manifest.version,
image,
&arch,
&mut archive,
)
.await?;
Command::new(*CONTAINER_TOOL)
.arg("rmi")
.arg("-f")
.arg(&image_name)
.invoke(ErrorKind::Docker)
.await?;
}
}
// assets: unpack the v1 assets tarball, then squash it into assets.squashfs
let asset_dir = tmp_dir.join("assets");
tokio::fs::create_dir_all(&asset_dir).await?;
tokio_tar::Archive::new(reader.assets().await?)
.unpack(&asset_dir)
.await?;
let sqfs_path = asset_dir.with_extension("squashfs");
Command::new("mksquashfs")
.arg(&asset_dir)
.arg(&sqfs_path)
.invoke(ErrorKind::Filesystem)
.await?;
archive.insert_path(
"assets.squashfs",
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(sqfs_path))),
)?;
// javascript: bundle the v1 scripts (if any) plus the raw v1 manifest into
// javascript.squashfs for the compat runtime
let js_dir = tmp_dir.join("javascript");
let sqfs_path = js_dir.with_extension("squashfs");
tokio::fs::create_dir_all(&js_dir).await?;
if let Some(mut scripts) = reader.scripts().await? {
let mut js_file = create_file(js_dir.join("embassy.js")).await?;
tokio::io::copy(&mut scripts, &mut js_file).await?;
js_file.sync_all().await?;
}
{
let mut js_file = create_file(js_dir.join("embassyManifest.json")).await?;
js_file
.write_all(&serde_json::to_vec(&manifest_raw).with_kind(ErrorKind::Serialization)?)
.await?;
js_file.sync_all().await?;
}
Command::new("mksquashfs")
.arg(&js_dir)
.arg(&sqfs_path)
.invoke(ErrorKind::Filesystem)
.await?;
archive.insert_path(
Path::new("javascript.squashfs"),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(sqfs_path))),
)?;
// the converted v2 manifest
archive.insert_path(
"manifest.json",
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(
serde_json::to_vec::<Manifest>(&new_manifest)
.with_kind(ErrorKind::Serialization)?
.into(),
),
)),
)?;
// sign and compute merkle hashes for the assembled archive
let mut res = S9pk::new(MerkleArchive::new(archive, signer, SIG_CONTEXT), None).await?;
res.as_archive_mut().update_hashes(true).await?;
Ok(res)
}
}
// Best-effort upgrade of a v1 manifest into the v2 schema.
impl TryFrom<ManifestV1> for Manifest {
type Error = Error;
fn try_from(value: ManifestV1) -> Result<Self, Self::Error> {
// fallback used when the v1 manifest omits support/marketing sites
let default_url = value.upstream_repo.clone();
let mut version = ExtendedVersion::from(
exver::emver::Version::from_str(&value.version)
.with_kind(ErrorKind::Deserialization)?,
);
// Known packages whose legacy version strings lack the flavor / prerelease
// markers their modern registry entries carry; patch them in by package id.
if &*value.id == "bitcoind" && value.title.to_ascii_lowercase().contains("knots") {
version = version.with_flavor("knots");
} else if &*value.id == "lnd" || &*value.id == "ride-the-lightning" || &*value.id == "datum"
{
version = version.map_upstream(|v| v.with_prerelease(["beta".into()]));
} else if &*value.id == "lightning-terminal" || &*value.id == "robosats" {
version = version.map_upstream(|v| v.with_prerelease(["alpha".into()]));
}
Ok(Self {
id: value.id,
title: format!("{} (Legacy)", value.title).into(),
version: version.into(),
satisfies: BTreeSet::new(),
release_notes: value.release_notes,
// anything may migrate onto a legacy package; nothing may migrate off of it
can_migrate_from: VersionRange::any(),
can_migrate_to: VersionRange::none(),
license: value.license.into(),
wrapper_repo: value.wrapper_repo,
upstream_repo: value.upstream_repo,
support_site: value.support_site.unwrap_or_else(|| default_url.clone()),
marketing_site: value.marketing_site.unwrap_or_else(|| default_url.clone()),
donation_url: value.donation_url,
docs_url: None,
description: value.description,
images: BTreeMap::new(),
// keep only volumes of type "data", and always add the "embassy" volume
volumes: value
.volumes
.iter()
.filter(|(_, v)| v.get("type").and_then(|v| v.as_str()) == Some("data"))
.map(|(id, _)| id.clone())
.chain([VolumeId::from_str("embassy").unwrap()])
.collect(),
alerts: value.alerts,
// v1 expressed "required"; v2 expresses the inverse, "optional"
dependencies: Dependencies(
value
.dependencies
.into_iter()
.map(|(id, value)| {
(
id,
DepInfo {
description: value.description,
optional: !value.requirement.required(),
metadata: None,
},
)
})
.collect(),
),
hardware_requirements: super::manifest::HardwareRequirements {
arch: value.hardware_requirements.arch,
ram: value.hardware_requirements.ram,
device: value
.hardware_requirements
.device
.into_iter()
.map(|(class, product)| DeviceFilter {
pattern_description: format!(
"a {class} device matching the expression {}",
product.as_ref()
),
class,
pattern: product,
})
.collect(),
},
git_hash: value.git_hash,
os_version: value.eos_version,
sdk_version: None,
})
}
}

View File

@@ -0,0 +1,224 @@
use std::collections::{BTreeMap, BTreeSet};
use std::path::Path;
use color_eyre::eyre::eyre;
use exver::{Version, VersionRange};
use imbl_value::InternedString;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
pub use crate::PackageId;
use crate::dependencies::Dependencies;
use crate::prelude::*;
use crate::s9pk::git_hash::GitHash;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::expected::{Expected, Filter};
use crate::s9pk::v2::pack::ImageConfig;
use crate::util::serde::Regex;
use crate::util::{VersionString, mime};
use crate::version::{Current, VersionT};
use crate::{ImageId, VolumeId};
fn current_version() -> Version {
Current::default().semver()
}
/// The s9pk v2 package manifest (`manifest.json` in the archive).
#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct Manifest {
pub id: PackageId,
#[ts(type = "string")]
pub title: InternedString,
pub version: VersionString,
/// other version strings this package may stand in for
pub satisfies: BTreeSet<VersionString>,
pub release_notes: String,
#[ts(type = "string")]
pub can_migrate_to: VersionRange,
#[ts(type = "string")]
pub can_migrate_from: VersionRange,
#[ts(type = "string")]
pub license: InternedString, // type of license
#[ts(type = "string")]
pub wrapper_repo: Url,
#[ts(type = "string")]
pub upstream_repo: Url,
#[ts(type = "string")]
pub support_site: Url,
#[ts(type = "string")]
pub marketing_site: Url,
#[ts(type = "string | null")]
pub donation_url: Option<Url>,
#[ts(type = "string | null")]
pub docs_url: Option<Url>,
pub description: Description,
/// per-image packing configuration, keyed by image id
pub images: BTreeMap<ImageId, ImageConfig>,
pub volumes: BTreeSet<VolumeId>,
#[serde(default)]
pub alerts: Alerts,
#[serde(default)]
pub dependencies: Dependencies,
#[serde(default)]
pub hardware_requirements: HardwareRequirements,
pub git_hash: Option<GitHash>,
/// OS version the package targets; defaults to the running OS version
#[serde(default = "current_version")]
#[ts(type = "string")]
pub os_version: Version,
#[ts(type = "string | null")]
pub sdk_version: Option<Version>,
}
impl Manifest {
    /// Verify that `archive` contains every entry this manifest promises and
    /// return a `Filter` over the entries worth keeping.
    ///
    /// When `arch` is `Some`, only that architecture's images are required;
    /// otherwise all arches from `hardware_requirements` are checked, or —
    /// failing that — each image's own `arch` set, which must then include its
    /// `emulate_missing_as` fallback.
    pub fn validate_for<'a, T: Clone>(
        &self,
        arch: Option<&str>,
        archive: &'a DirectoryContents<T>,
    ) -> Result<Filter, Error> {
        let mut expected = Expected::new(archive);
        expected.check_file("manifest.json")?;
        // icon.<ext> must exist with an image/* mime type
        expected.check_stem("icon", |ext| {
            ext.and_then(|e| e.to_str())
                .and_then(mime)
                .map_or(false, |mime| mime.starts_with("image/"))
        })?;
        expected.check_file("LICENSE.md")?;
        expected.check_file("javascript.squashfs")?;
        // dependency metadata/icons are optional: results deliberately ignored,
        // the check only marks them as retained when present
        for (dependency, _) in &self.dependencies.0 {
            let dep_path = Path::new("dependencies").join(dependency);
            let _ = expected.check_file(dep_path.join("metadata.json"));
            let _ = expected.check_stem(dep_path.join("icon"), |ext| {
                ext.and_then(|e| e.to_str())
                    .and_then(mime)
                    .map_or(false, |mime| mime.starts_with("image/"))
            });
        }
        if let Err(e) = expected.check_file(Path::new("assets.squashfs")) {
            // backwards compatibility for alpha s9pks - remove eventually
            if expected.check_dir("assets").is_err() {
                tracing::warn!("{e}");
                tracing::debug!("{e:?}");
                // return Err(e);
            }
        }
        for (image_id, config) in &self.images {
            // require images/<arch>/<id>.{squashfs,json,env}; when the squashfs
            // for a given arch is missing, fall back to the `emulateMissingAs` arch
            let mut check_arch = |arch: &str| {
                let mut arch = arch;
                if let Err(e) = expected.check_file(
                    Path::new("images")
                        .join(arch)
                        .join(image_id)
                        .with_extension("squashfs"),
                ) {
                    if let Some(emulate_as) = &config.emulate_missing_as {
                        // BUGFIX: check the *fallback* arch's squashfs. The previous
                        // code re-checked the arch that was just found missing, so
                        // the emulation fallback could never validate.
                        expected.check_file(
                            Path::new("images")
                                .join(&**emulate_as)
                                .join(image_id)
                                .with_extension("squashfs"),
                        )?;
                        arch = &**emulate_as;
                    } else {
                        return Err(e);
                    }
                }
                expected.check_file(
                    Path::new("images")
                        .join(arch)
                        .join(image_id)
                        .with_extension("json"),
                )?;
                expected.check_file(
                    Path::new("images")
                        .join(arch)
                        .join(image_id)
                        .with_extension("env"),
                )?;
                Ok(())
            };
            if let Some(arch) = arch {
                check_arch(arch)?;
            } else if let Some(arches) = &self.hardware_requirements.arch {
                for arch in arches {
                    check_arch(arch)?;
                }
            } else if let Some(arch) = config.emulate_missing_as.as_deref() {
                if !config.arch.contains(arch) {
                    return Err(Error::new(
                        eyre!("`emulateMissingAs` must match an included `arch`"),
                        ErrorKind::ParseS9pk,
                    ));
                }
                for arch in &config.arch {
                    check_arch(&arch)?;
                }
            } else {
                return Err(Error::new(
                    eyre!(
                        "`emulateMissingAs` required for all images if no `arch` specified in `hardwareRequirements`"
                    ),
                    ErrorKind::ParseS9pk,
                ));
            }
        }
        Ok(expected.into_filter())
    }
}
/// Host requirements a package may declare; all fields optional.
#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct HardwareRequirements {
#[serde(default)]
pub device: Vec<DeviceFilter>,
/// minimum RAM, if constrained
#[ts(type = "number | null")]
pub ram: Option<u64>,
/// supported CPU architectures; `None` = no restriction
#[ts(type = "string[] | null")]
pub arch: Option<BTreeSet<InternedString>>,
}
/// Requires the host to have a device of `class` whose name matches `pattern`.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct DeviceFilter {
#[ts(type = "\"processor\" | \"display\"")]
pub class: InternedString,
#[ts(type = "string")]
pub pattern: Regex,
/// human-readable explanation shown when the requirement is unmet
pub pattern_description: String,
}
/// Short and long package descriptions; length limits enforced by `validate`.
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[ts(export)]
pub struct Description {
pub short: String,
pub long: String,
}
impl Description {
    /// Enforce length limits: `short` ≤ 160 chars, `long` ≤ 5000 chars
    /// (counted in `char`s, not bytes).
    pub fn validate(&self) -> Result<(), Error> {
        // `.nth(n).is_some()` == "has more than n chars" — idiomatic form of the
        // original `.skip(n).next()`, and stops without scanning the whole string
        if self.short.chars().nth(160).is_some() {
            return Err(Error::new(
                eyre!("Short description must be 160 characters or less."),
                crate::ErrorKind::ValidateS9pk,
            ));
        }
        if self.long.chars().nth(5000).is_some() {
            return Err(Error::new(
                eyre!("Long description must be 5000 characters or less."),
                crate::ErrorKind::ValidateS9pk,
            ));
        }
        Ok(())
    }
}
/// Optional user-facing alert text surfaced at each package lifecycle action.
#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct Alerts {
pub install: Option<String>,
pub uninstall: Option<String>,
pub restore: Option<String>,
pub start: Option<String>,
pub stop: Option<String>,
}

346
core/src/s9pk/v2/mod.rs Normal file
View File

@@ -0,0 +1,346 @@
use std::ffi::OsStr;
use std::path::Path;
use std::sync::Arc;
use imbl_value::InternedString;
use tokio::fs::File;
use crate::PackageId;
use crate::dependencies::DependencyMetadata;
use crate::prelude::*;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::sink::Sink;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{
ArchiveSource, DynFileSource, FileSource, Section, TmpSource,
};
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::v2::pack::{ImageSource, PackSource};
use crate::sign::commitment::merkle_archive::MerkleArchiveCommitment;
use crate::util::io::{TmpDir, open_file};
use crate::util::serde::IoFormat;
use crate::util::{DataUrl, mime};
const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x02];
pub const SIG_CONTEXT: &str = "s9pk";
pub mod compat;
pub mod manifest;
pub mod pack;
pub mod recipe;
/**
/
├── manifest.json
├── icon.<ext>
├── LICENSE.md
├── dependencies
│ └── <id>
│ ├── metadata.json
│ └── icon.<ext>
├── javascript.squashfs
├── assets
│ └── <id>.squashfs (xN)
└── images
└── <arch>
├── <id>.json (xN)
├── <id>.env (xN)
└── <id>.squashfs (xN)
*/
// this sorts the s9pk to optimize such that the parts that are used first appear earlier in the s9pk
// this is useful for manipulating an s9pk while partially downloaded on a source that does not support
// random access
/// Streaming priority of a top-level archive entry: lower = earlier in the
/// serialized s9pk, `None` = unprioritized (sorted after everything else).
fn priority(s: &str) -> Option<usize> {
    // icon.* is matched by stem rather than exact name (extension varies)
    if Path::new(s).file_stem() == Some(OsStr::new("icon")) {
        return Some(1);
    }
    match s {
        "manifest.json" => Some(0),
        "LICENSE.md" => Some(2),
        "dependencies" => Some(3),
        "javascript.squashfs" => Some(4),
        "assets.squashfs" => Some(5),
        "images" => Some(6),
        _ => None,
    }
}
/// A v2 s9pk: the parsed manifest plus the merkle archive it came from.
#[derive(Clone)]
pub struct S9pk<S = Section<MultiCursorFile>> {
pub manifest: Manifest,
// true when `manifest` has diverged from the copy embedded in `archive`;
// `serialize` re-embeds the manifest in that case
manifest_dirty: bool,
archive: MerkleArchive<S>,
// total byte size of the backing source, when known
size: Option<u64>,
}
impl<S> S9pk<S> {
    /// Borrow the parsed manifest.
    pub fn as_manifest(&self) -> &Manifest {
        &self.manifest
    }

    /// Mutably borrow the manifest. Marks it dirty so `serialize` re-embeds it.
    pub fn as_manifest_mut(&mut self) -> &mut Manifest {
        self.manifest_dirty = true;
        &mut self.manifest
    }

    /// Borrow the underlying merkle archive.
    pub fn as_archive(&self) -> &MerkleArchive<S> {
        &self.archive
    }

    /// Mutably borrow the underlying merkle archive.
    pub fn as_archive_mut(&mut self) -> &mut MerkleArchive<S> {
        &mut self.archive
    }

    /// Total byte size of the backing source, when known.
    pub fn size(&self) -> Option<u64> {
        self.size
    }
}
impl<S: FileSource + Clone> S9pk<S> {
/// Build an `S9pk` from an archive, parsing `manifest.json` out of it.
pub async fn new(archive: MerkleArchive<S>, size: Option<u64>) -> Result<Self, Error> {
let manifest = extract_manifest(&archive).await?;
Ok(Self {
manifest,
// manifest came straight from the archive, so the two agree
manifest_dirty: false,
archive,
size,
})
}
/// Build an `S9pk` with a caller-supplied manifest; marked dirty so the
/// supplied manifest gets re-embedded on `serialize`.
pub fn new_with_manifest(
archive: MerkleArchive<S>,
size: Option<u64>,
manifest: Manifest,
) -> Self {
Self {
manifest,
manifest_dirty: true,
archive,
size,
}
}
/// Validate archive contents against the manifest (for `arch`, if given) and
/// drop entries the manifest doesn't call for.
pub fn validate_and_filter(&mut self, arch: Option<&str>) -> Result<(), Error> {
let filter = self.manifest.validate_for(arch, self.archive.contents())?;
filter.keep_checked(self.archive.contents_mut())
}
/// Find the package icon: among top-level `icon.*` files with an image/*
/// mime type, pick the largest by byte size.
pub async fn icon(&self) -> Result<(InternedString, Entry<S>), Error> {
let mut best_icon = None;
for (path, icon) in self.archive.contents().with_stem("icon").filter(|(p, v)| {
Path::new(&*p)
.extension()
.and_then(|e| e.to_str())
.and_then(mime)
.map_or(false, |e| e.starts_with("image/") && v.as_file().is_some())
}) {
let size = icon.expect_file()?.size().await?;
best_icon = match best_icon {
Some((s, a)) if s >= size => Some((s, a)),
_ => Some((size, (path, icon))),
};
}
best_icon
.map(|(_, a)| a)
.ok_or_else(|| Error::new(eyre!("no icon found in archive"), ErrorKind::ParseS9pk))
}
/// Package icon as a data: URL; mime inferred from the extension,
/// defaulting to image/png.
pub async fn icon_data_url(&self) -> Result<DataUrl<'static>, Error> {
let (name, contents) = self.icon().await?;
let mime = Path::new(&*name)
.extension()
.and_then(|e| e.to_str())
.and_then(mime)
.unwrap_or("image/png");
Ok(DataUrl::from_vec(
mime,
contents.expect_file()?.to_vec(contents.hash()).await?,
))
}
/// Same selection as `icon`, but scoped to `dependencies/<id>/icon.*`;
/// `Ok(None)` when the dependency ships no icon.
pub async fn dependency_icon(
&self,
id: &PackageId,
) -> Result<Option<(InternedString, Entry<S>)>, Error> {
let mut best_icon = None;
for (path, icon) in self
.archive
.contents()
.get_path(Path::new("dependencies").join(id))
.and_then(|p| p.as_directory())
.into_iter()
.flat_map(|d| {
d.with_stem("icon").filter(|(p, v)| {
Path::new(&*p)
.extension()
.and_then(|e| e.to_str())
.and_then(mime)
.map_or(false, |e| e.starts_with("image/") && v.as_file().is_some())
})
})
{
let size = icon.expect_file()?.size().await?;
best_icon = match best_icon {
Some((s, a)) if s >= size => Some((s, a)),
_ => Some((size, (path, icon))),
};
}
Ok(best_icon.map(|(_, a)| a))
}
/// Dependency icon as a data: URL, if present.
pub async fn dependency_icon_data_url(
&self,
id: &PackageId,
) -> Result<Option<DataUrl<'static>>, Error> {
let Some((name, contents)) = self.dependency_icon(id).await? else {
return Ok(None);
};
let mime = Path::new(&*name)
.extension()
.and_then(|e| e.to_str())
.and_then(mime)
.unwrap_or("image/png");
Ok(Some(DataUrl::from_vec(
mime,
contents.expect_file()?.to_vec(contents.hash()).await?,
)))
}
/// Parse `dependencies/<id>/metadata.json`, if present.
pub async fn dependency_metadata(
&self,
id: &PackageId,
) -> Result<Option<DependencyMetadata>, Error> {
if let Some(entry) = self
.archive
.contents()
.get_path(Path::new("dependencies").join(id).join("metadata.json"))
{
Ok(Some(IoFormat::Json.from_slice(
&entry.expect_file()?.to_vec(entry.hash()).await?,
)?))
} else {
Ok(None)
}
}
/// Write magic+version then the archive to `w`. When the in-memory manifest
/// is dirty, serialize a clone with the current manifest re-embedded so the
/// bytes on disk match `self.manifest`.
pub async fn serialize<W: Sink>(&mut self, w: &mut W, verify: bool) -> Result<(), Error> {
use tokio::io::AsyncWriteExt;
w.write_all(MAGIC_AND_VERSION).await?;
if !self.manifest_dirty {
self.archive.serialize(w, verify).await?;
} else {
let mut dyn_s9pk = self.clone().into_dyn();
dyn_s9pk.as_archive_mut().contents_mut().insert_path(
"manifest.json",
Entry::file(DynFileSource::new(Arc::<[u8]>::from(
serde_json::to_vec(&self.manifest).with_kind(ErrorKind::Serialization)?,
))),
)?;
dyn_s9pk.archive.serialize(w, verify).await?;
}
Ok(())
}
/// Type-erase the file source.
pub fn into_dyn(self) -> S9pk<DynFileSource> {
S9pk {
manifest: self.manifest,
manifest_dirty: self.manifest_dirty,
archive: self.archive.into_dyn(),
size: self.size,
}
}
}
impl<S: From<TmpSource<PackSource>> + FileSource + Clone> S9pk<S> {
/// Materialize every image referenced by the manifest into the archive
/// (`images/<arch>/<id>.*`), then mark each image's source as `Packed`.
pub async fn load_images(&mut self, tmp_dir: Arc<TmpDir>) -> Result<(), Error> {
let id = &self.manifest.id;
let version = &self.manifest.version;
for (image_id, image_config) in &mut self.manifest.images {
// image_config.source is rewritten below, so the embedded manifest copy is stale
self.manifest_dirty = true;
for arch in &image_config.arch {
image_config
.source
.load(
tmp_dir.clone(),
id,
version,
image_id,
arch,
self.archive.contents_mut(),
)
.await?;
}
// the image bytes now live inside the archive itself
image_config.source = ImageSource::Packed;
}
Ok(())
}
}
impl<S: ArchiveSource + Clone> S9pk<Section<S>> {
/// Fetch and validate the leading bytes of `source` (magic + version,
/// then the fixed-size merkle archive header) and deserialize the archive.
#[instrument(skip_all)]
pub async fn archive(
source: &S,
commitment: Option<&MerkleArchiveCommitment>,
) -> Result<MerkleArchive<Section<S>>, Error> {
use tokio::io::AsyncReadExt;
let mut header = source
.fetch(
0,
MAGIC_AND_VERSION.len() as u64 + MerkleArchive::<Section<S>>::header_size(),
)
.await?;
let mut magic_version = [0u8; MAGIC_AND_VERSION.len()];
header.read_exact(&mut magic_version).await?;
ensure_code!(
&magic_version == MAGIC_AND_VERSION,
ErrorKind::ParseS9pk,
"Invalid Magic or Unexpected Version"
);
MerkleArchive::deserialize(source, SIG_CONTEXT, &mut header, commitment).await
}
/// Deserialize a full `S9pk`, sorting entries by `priority` so the parts
/// consumed first appear earliest (see the module comment on streaming).
#[instrument(skip_all)]
pub async fn deserialize(
source: &S,
commitment: Option<&MerkleArchiveCommitment>,
) -> Result<Self, Error> {
let mut archive = Self::archive(source, commitment).await?;
// prioritized entries first (ascending), unprioritized entries last
archive.sort_by(|a, b| match (priority(a), priority(b)) {
(Some(a), Some(b)) => a.cmp(&b),
(Some(_), None) => std::cmp::Ordering::Less,
(None, Some(_)) => std::cmp::Ordering::Greater,
(None, None) => std::cmp::Ordering::Equal,
});
Self::new(archive, source.size().await).await
}
}
impl S9pk {
    /// Deserialize an s9pk from an already-open file, with no commitment check.
    pub async fn from_file(file: File) -> Result<Self, Error> {
        let source = MultiCursorFile::from(file);
        Self::deserialize(&source, None).await
    }

    /// Open and deserialize an s9pk from disk. When `id` is given, verify the
    /// manifest's package id matches it.
    pub async fn open(path: impl AsRef<Path>, id: Option<&PackageId>) -> Result<Self, Error> {
        let file = open_file(path).await?;
        let res = Self::from_file(file).await?;
        if let Some(expected_id) = id {
            ensure_code!(
                &res.as_manifest().id == expected_id,
                ErrorKind::ValidateS9pk,
                "manifest.id does not match expected"
            );
        }
        Ok(res)
    }
}
/// Read and parse `manifest.json` out of a merkle archive.
async fn extract_manifest<S: FileSource>(archive: &MerkleArchive<S>) -> Result<Manifest, Error> {
    let entry = archive
        .contents()
        .get_path("manifest.json")
        .or_not_found("manifest.json")?;
    let bytes = entry.read_file_to_vec().await?;
    serde_json::from_slice(&bytes).with_kind(ErrorKind::Deserialization)
}

864
core/src/s9pk/v2/pack.rs Normal file
View File

@@ -0,0 +1,864 @@
use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::sync::{Arc, LazyLock, OnceLock};
use clap::Parser;
use futures::future::{BoxFuture, ready};
use futures::{FutureExt, TryStreamExt};
use imbl_value::InternedString;
use serde::{Deserialize, Serialize};
use tokio::process::Command;
use tokio::sync::OnceCell;
use tokio_stream::wrappers::ReadDirStream;
use tracing::{debug, warn};
use ts_rs::TS;
use crate::context::CliContext;
use crate::dependencies::{DependencyMetadata, MetadataSrc};
use crate::prelude::*;
use crate::rpc_continuations::Guid;
use crate::s9pk::S9pk;
use crate::s9pk::git_hash::GitHash;
use crate::s9pk::manifest::Manifest;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::source::http::HttpSource;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::s9pk::merkle_archive::source::{
ArchiveSource, DynFileSource, DynRead, FileSource, TmpSource, into_dyn_read,
};
use crate::s9pk::merkle_archive::{Entry, MerkleArchive};
use crate::s9pk::v2::SIG_CONTEXT;
use crate::util::io::{TmpDir, create_file, open_file};
use crate::util::serde::IoFormat;
use crate::util::{DataUrl, Invoke, PathOrUrl, VersionString, new_guid};
use crate::{ImageId, PackageId};
/// Set before first use of `CONTAINER_TOOL` to prefer docker over podman.
pub static PREFER_DOCKER: OnceLock<bool> = OnceLock::new();

/// Container CLI to shell out to: "docker" only when preferred *and* found on
/// PATH, otherwise "podman". Resolved once on first access.
pub static CONTAINER_TOOL: LazyLock<&'static str> = LazyLock::new(|| {
    let prefer_docker = *PREFER_DOCKER.get_or_init(|| false);
    let docker_available = || {
        std::process::Command::new("which")
            .arg("docker")
            .stdout(Stdio::null())
            .status()
            .map_or(false, |s| s.success())
    };
    if prefer_docker && docker_available() {
        "docker"
    } else {
        "podman"
    }
});
/// Data directory of the selected container runtime.
pub static CONTAINER_DATADIR: LazyLock<&'static str> = LazyLock::new(|| match *CONTAINER_TOOL {
    "docker" => "/var/lib/docker",
    _ => "/var/lib/containers",
});
/// Lazily squashes a directory (or `.tar`) at `path` into a squashfs image
/// under `tmpdir`, memoizing the resulting open file.
pub struct SqfsDir {
path: PathBuf,
tmpdir: Arc<TmpDir>,
// built on first call to `file()`
sqfs: OnceCell<MultiCursorFile>,
}
impl SqfsDir {
pub fn new(path: PathBuf, tmpdir: Arc<TmpDir>) -> Self {
Self {
path,
tmpdir,
sqfs: OnceCell::new(),
}
}
/// Return the squashfs image for `path`, building it on first call.
async fn file(&self) -> Result<&MultiCursorFile, Error> {
self.sqfs
.get_or_try_init(|| async move {
// unique output path inside the tmp dir
let guid = Guid::new();
let path = self.tmpdir.join(guid.as_ref()).with_extension("squashfs");
if self.path.extension().and_then(|s| s.to_str()) == Some("tar") {
// NOTE(review): tar2sqfs is handed `self.path` (the source tarball)
// while the result is opened from the tmp `path` below — confirm
// tar2sqfs actually writes its output to `path`, not `self.path`.
tar2sqfs(&self.path)?
.input(Some(&mut open_file(&self.path).await?))
.invoke(ErrorKind::Filesystem)
.await?;
} else {
Command::new("mksquashfs")
.arg(&self.path)
.arg(&path)
.arg("-quiet")
.invoke(ErrorKind::Filesystem)
.await?;
}
Ok(MultiCursorFile::from(
open_file(&path)
.await
.with_ctx(|_| (ErrorKind::Filesystem, path.display()))?,
))
})
.await
}
}
/// Backing bytes for an entry being packed into an s9pk.
#[derive(Clone)]
pub enum PackSource {
/// fully in-memory buffer
Buffered(Arc<[u8]>),
/// existing file on disk
File(PathBuf),
/// directory/tarball squashed lazily on first access
Squashfs(Arc<SqfsDir>),
}
impl FileSource for PackSource {
type Reader = DynRead;
type SliceReader = DynRead;
/// Byte length of the source; the squashfs variant builds its image on demand.
async fn size(&self) -> Result<u64, Error> {
match self {
Self::Buffered(a) => Ok(a.len() as u64),
Self::File(f) => Ok(tokio::fs::metadata(f)
.await
.with_ctx(|_| (ErrorKind::Filesystem, f.display()))?
.len()),
Self::Squashfs(dir) => dir
.file()
.await
.with_ctx(|_| (ErrorKind::Filesystem, dir.path.display()))?
.size()
.await
.or_not_found("file metadata"),
}
}
/// Reader over the whole source.
async fn reader(&self) -> Result<Self::Reader, Error> {
match self {
Self::Buffered(a) => Ok(into_dyn_read(FileSource::reader(a).await?)),
Self::File(f) => Ok(into_dyn_read(FileSource::reader(f).await?)),
Self::Squashfs(dir) => dir.file().await?.fetch_all().await.map(into_dyn_read),
}
}
/// Reader over `size` bytes starting at `position`.
async fn slice(&self, position: u64, size: u64) -> Result<Self::SliceReader, Error> {
match self {
Self::Buffered(a) => Ok(into_dyn_read(FileSource::slice(a, position, size).await?)),
Self::File(f) => Ok(into_dyn_read(FileSource::slice(f, position, size).await?)),
Self::Squashfs(dir) => dir
.file()
.await?
.fetch(position, size)
.await
.map(into_dyn_read),
}
}
}
impl From<PackSource> for DynFileSource {
fn from(value: PackSource) -> Self {
DynFileSource::new(value)
}
}
/// CLI arguments for packing a project directory into an s9pk.
#[derive(Deserialize, Serialize, Parser)]
pub struct PackParams {
// project root; defaults to "." (see `PackParams::path`)
pub path: Option<PathBuf>,
#[arg(short, long)]
pub output: Option<PathBuf>,
#[arg(long)]
pub javascript: Option<PathBuf>,
#[arg(long)]
pub icon: Option<PathBuf>,
#[arg(long)]
pub license: Option<PathBuf>,
// NOTE(review): clap derive arg IDs default to the snake_case field name
// (`no_assets`), so `conflicts_with = "no-assets"` may not resolve at
// runtime — verify against the clap version in use.
#[arg(long, conflicts_with = "no-assets")]
pub assets: Option<PathBuf>,
#[arg(long, conflicts_with = "assets")]
pub no_assets: bool,
#[arg(long, help = "Architecture Mask")]
pub arch: Vec<InternedString>,
}
impl PackParams {
fn path(&self) -> &Path {
self.path.as_deref().unwrap_or(Path::new("."))
}
fn output(&self, id: &PackageId) -> PathBuf {
self.output
.as_ref()
.cloned()
.unwrap_or_else(|| self.path().join(id).with_extension("s9pk"))
}
fn javascript(&self) -> PathBuf {
self.javascript
.as_ref()
.cloned()
.unwrap_or_else(|| self.path().join("javascript"))
}
async fn icon(&self) -> Result<PathBuf, Error> {
if let Some(icon) = &self.icon {
Ok(icon.clone())
} else {
ReadDirStream::new(tokio::fs::read_dir(self.path()).await?)
.try_filter(|x| {
ready(
x.path()
.file_stem()
.map_or(false, |s| s.eq_ignore_ascii_case("icon")),
)
})
.map_err(Error::from)
.try_fold(
Err(Error::new(eyre!("icon not found"), ErrorKind::NotFound)),
|acc, x| async move {
match acc {
Ok(_) => Err(Error::new(eyre!("multiple icons found in working directory, please specify which to use with `--icon`"), ErrorKind::InvalidRequest)),
Err(e) => Ok({
let path = x.path();
if path
.file_stem()
.map_or(false, |s| s.eq_ignore_ascii_case("icon"))
{
Ok(path)
} else {
Err(e)
}
}),
}
},
)
.await?
}
}
async fn license(&self) -> Result<PathBuf, Error> {
if let Some(license) = &self.license {
Ok(license.clone())
} else {
ReadDirStream::new(tokio::fs::read_dir(self.path()).await?)
.try_filter(|x| {
ready(
x.path()
.file_stem()
.map_or(false, |s| s.eq_ignore_ascii_case("license")),
)
})
.map_err(Error::from)
.try_fold(
Err(Error::new(eyre!("license not found"), ErrorKind::NotFound)),
|acc, x| async move {
match acc {
Ok(_) => Err(Error::new(eyre!("multiple licenses found in working directory, please specify which to use with `--license`"), ErrorKind::InvalidRequest)),
Err(e) => Ok({
let path = x.path();
if path
.file_stem()
.map_or(false, |s| s.eq_ignore_ascii_case("license"))
{
Ok(path)
} else {
Err(e)
}
}),
}
},
)
.await?
}
}
fn assets(&self) -> PathBuf {
self.assets
.as_ref()
.cloned()
.unwrap_or_else(|| self.path().join("assets"))
}
}
/// Per-image packing configuration from the manifest.
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ImageConfig {
pub source: ImageSource,
/// architectures this image is provided for
#[ts(type = "string[]")]
pub arch: BTreeSet<InternedString>,
/// arch whose image is reused (under emulation) when a platform's own image is absent
#[ts(type = "string | null")]
pub emulate_missing_as: Option<InternedString>,
}
impl Default for ImageConfig {
fn default() -> Self {
Self {
source: ImageSource::Packed,
arch: BTreeSet::new(),
emulate_missing_as: None,
}
}
}
/// CLI-shaped mirror of `ImageConfig`; converted via `TryFrom` after parsing.
// NOTE(review): clap derive arg IDs default to the snake_case field names
// (`docker_build`, `docker_tag`), so the kebab-case ids used in
// `conflicts_with`/`requires` below may not resolve — verify against the
// clap version in use.
#[derive(Parser)]
struct CliImageConfig {
#[arg(long, conflicts_with("docker-tag"))]
docker_build: bool,
#[arg(long, requires("docker-build"))]
dockerfile: Option<PathBuf>,
#[arg(long, requires("docker-build"))]
workdir: Option<PathBuf>,
#[arg(long, conflicts_with_all(["dockerfile", "workdir"]))]
docker_tag: Option<String>,
#[arg(long)]
arch: Vec<InternedString>,
#[arg(long)]
emulate_missing_as: Option<InternedString>,
}
impl TryFrom<CliImageConfig> for ImageConfig {
type Error = clap::Error;
fn try_from(value: CliImageConfig) -> Result<Self, Self::Error> {
let res = Self {
source: if value.docker_build {
ImageSource::DockerBuild {
dockerfile: value.dockerfile,
workdir: value.workdir,
build_args: None,
}
} else if let Some(tag) = value.docker_tag {
ImageSource::DockerTag(tag)
} else {
ImageSource::Packed
},
arch: value.arch.into_iter().collect(),
emulate_missing_as: value.emulate_missing_as,
};
res.emulate_missing_as
.as_ref()
.map(|a| {
if !res.arch.contains(a) {
Err(clap::Error::raw(
clap::error::ErrorKind::InvalidValue,
"`emulate-missing-as` must match one of the provided `arch`es",
))
} else {
Ok(())
}
})
.transpose()?;
Ok(res)
}
}
// Delegate clap argument registration to the CLI-shaped mirror struct.
impl clap::Args for ImageConfig {
fn augment_args(cmd: clap::Command) -> clap::Command {
CliImageConfig::augment_args(cmd)
}
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
CliImageConfig::augment_args_for_update(cmd)
}
}
// Parse matches via CliImageConfig, then convert (which also validates
// the emulate-missing-as / arch relationship).
impl clap::FromArgMatches for ImageConfig {
fn from_arg_matches(matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
Self::try_from(CliImageConfig::from_arg_matches(matches)?)
}
fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> {
*self = Self::try_from(CliImageConfig::from_arg_matches(matches)?)?;
Ok(())
}
}
/// A docker build argument: either a literal string or the value of an
/// environment variable read on the packing machine.
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
#[ts(export)]
pub enum BuildArg {
String(String),
EnvVar { env: String },
}
/// Where an image's filesystem comes from at pack time.
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum ImageSource {
/// already embedded in the s9pk (`images/<arch>/<id>.squashfs`)
Packed,
/// build from a Dockerfile in the project
#[serde(rename_all = "camelCase")]
DockerBuild {
#[ts(optional)]
workdir: Option<PathBuf>,
#[ts(optional)]
dockerfile: Option<PathBuf>,
#[serde(skip_serializing_if = "Option::is_none")]
#[ts(optional)]
build_args: Option<BTreeMap<String, BuildArg>>,
},
/// export from an existing local docker/podman tag
DockerTag(String),
// Recipe(DirRecipe),
}
impl ImageSource {
/// Paths in the project that this source depends on (used for rebuild
/// detection): just the Dockerfile for `DockerBuild`, nothing otherwise.
pub fn ingredients(&self) -> Vec<PathBuf> {
    if let Self::DockerBuild {
        dockerfile,
        workdir,
        ..
    } = self
    {
        let dir = workdir.as_deref().unwrap_or(Path::new("."));
        let file = dockerfile.as_deref().unwrap_or(Path::new("Dockerfile"));
        vec![dir.join(file)]
    } else {
        Vec::new()
    }
}
#[instrument(skip_all)]
pub fn load<'a, S: From<TmpSource<PackSource>> + FileSource + Clone>(
&'a self,
tmp_dir: Arc<TmpDir>,
id: &'a PackageId,
version: &'a VersionString,
image_id: &'a ImageId,
arch: &'a str,
into: &'a mut DirectoryContents<S>,
) -> BoxFuture<'a, Result<(), Error>> {
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
struct DockerImageConfig {
env: Vec<String>,
#[serde(default)]
working_dir: PathBuf,
#[serde(default)]
user: String,
entrypoint: Option<Vec<String>>,
cmd: Option<Vec<String>>,
}
async move {
match self {
ImageSource::Packed => Ok(()),
ImageSource::DockerBuild {
workdir,
dockerfile,
build_args,
} => {
let workdir = workdir.as_deref().unwrap_or(Path::new("."));
let dockerfile = dockerfile
.clone()
.unwrap_or_else(|| workdir.join("Dockerfile"));
let docker_platform = if arch == "x86_64" {
"--platform=linux/amd64".to_owned()
} else if arch == "aarch64" {
"--platform=linux/arm64".to_owned()
} else {
format!("--platform=linux/{arch}")
};
// docker buildx build ${path} -o type=image,name=start9/${id}
let tag = format!("start9/{id}/{image_id}:{}", new_guid());
let mut command = Command::new(*CONTAINER_TOOL);
if *CONTAINER_TOOL == "docker" {
command.arg("buildx");
}
command
.arg("build")
.arg(workdir)
.arg("-f")
.arg(dockerfile)
.arg("-t")
.arg(&tag)
.arg(&docker_platform)
.arg("--build-arg")
.arg(format!("ARCH={}", arch));
// add build arguments
if let Some(build_args) = build_args {
for (key, value) in build_args {
let build_arg_value = match value {
BuildArg::String(val) => val.to_string(),
BuildArg::EnvVar { env } => {
match std::env::var(&env) {
Ok(val) => val,
Err(_) => continue, // skip if env var not set or invalid
}
}
};
command
.arg("--build-arg")
.arg(format!("{}={}", key, build_arg_value));
}
}
command
.arg("-o")
.arg("type=docker,dest=-")
.capture(false)
.pipe(Command::new(*CONTAINER_TOOL).arg("load"))
.invoke(ErrorKind::Docker)
.await?;
ImageSource::DockerTag(tag.clone())
.load(tmp_dir, id, version, image_id, arch, into)
.await?;
Command::new(*CONTAINER_TOOL)
.arg("rmi")
.arg("-f")
.arg(&tag)
.invoke(ErrorKind::Docker)
.await?;
Ok(())
}
ImageSource::DockerTag(tag) => {
let docker_platform = if arch == "x86_64" {
"--platform=linux/amd64".to_owned()
} else if arch == "aarch64" {
"--platform=linux/arm64".to_owned()
} else {
format!("--platform=linux/{arch}")
};
let container = String::from_utf8(
Command::new(*CONTAINER_TOOL)
.arg("create")
.arg(&docker_platform)
.arg(&tag)
.invoke(ErrorKind::Docker)
.await?,
)?;
let container = container.trim();
let config = serde_json::from_slice::<DockerImageConfig>(
&Command::new(*CONTAINER_TOOL)
.arg("container")
.arg("inspect")
.arg("--format")
.arg("{{json .Config}}")
.arg(container)
.invoke(ErrorKind::Docker)
.await?,
)
.with_kind(ErrorKind::Deserialization)?;
let base_path = Path::new("images").join(arch).join(image_id);
into.insert_path(
base_path.with_extension("json"),
Entry::file(
TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(
serde_json::to_vec(&ImageMetadata {
workdir: if config.working_dir == Path::new("") {
"/".into()
} else {
config.working_dir
},
user: if config.user.is_empty() {
"root".into()
} else {
config.user.into()
},
entrypoint: config.entrypoint,
cmd: config.cmd,
})
.with_kind(ErrorKind::Serialization)?
.into(),
),
)
.into(),
),
)?;
into.insert_path(
base_path.with_extension("env"),
Entry::file(
TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(config.env.join("\n").into_bytes().into()),
)
.into(),
),
)?;
let dest = tmp_dir
.join(Guid::new().as_ref())
.with_extension("squashfs");
Command::new(*CONTAINER_TOOL)
.arg("export")
.arg(container)
.pipe(&mut tar2sqfs(&dest)?)
.capture(false)
.invoke(ErrorKind::Docker)
.await?;
Command::new(*CONTAINER_TOOL)
.arg("rm")
.arg(container)
.invoke(ErrorKind::Docker)
.await?;
into.insert_path(
base_path.with_extension("squashfs"),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(dest)).into()),
)?;
Ok(())
}
}
}
.boxed()
}
}
/// Builds a command that converts a tar stream (on stdin) into a squashfs
/// image written to `dest`.
///
/// On Linux this invokes the native `tar2sqfs` binary directly; on macOS it
/// runs the same tool inside the start9 utils container, bind-mounting
/// `dest`'s parent directory at `/data` so the output lands on the host.
fn tar2sqfs(dest: impl AsRef<Path>) -> Result<Command, Error> {
    let dest = dest.as_ref();
    #[cfg(target_os = "linux")]
    {
        let mut cmd = Command::new("tar2sqfs");
        cmd.arg("-q").arg(dest);
        Ok(cmd)
    }
    #[cfg(target_os = "macos")]
    {
        // Directory that gets bind-mounted into the container as /data.
        let mount_dir = dest
            .parent()
            .unwrap_or_else(|| Path::new("/"))
            .to_path_buf();
        let mut cmd = Command::new(*CONTAINER_TOOL);
        cmd.arg("run")
            .arg("-i")
            .arg("--rm")
            .arg("--mount")
            .arg(format!("type=bind,src={},dst=/data", mount_dir.display()))
            .arg("ghcr.io/start9labs/sdk/utils:latest")
            .arg("tar2sqfs")
            .arg("-q")
            .arg(Path::new("/data").join(dest.file_name().unwrap_or_default()));
        Ok(cmd)
    }
}
/// Normalized container image config, persisted as
/// `images/<arch>/<image_id>.json` by `ImageSource::load`.
///
/// Defaults are applied before serialization: an empty working dir becomes
/// "/" and an empty user becomes "root".
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ImageMetadata {
/// Working directory processes start in.
pub workdir: PathBuf,
/// User the image runs as.
#[ts(type = "string")]
pub user: InternedString,
/// Entrypoint from the image config, if any.
pub entrypoint: Option<Vec<String>>,
/// Default command from the image config, if any.
pub cmd: Option<Vec<String>>,
}
/// Packs a project directory into a signed `.s9pk` archive (the CLI `pack`
/// command): collects the manifest, icon, license, javascript squashfs and
/// optional assets, builds/exports the declared images, resolves dependency
/// metadata, then validates and serializes the archive.
#[instrument(skip_all)]
pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> {
let tmp_dir = Arc::new(TmpDir::new().await?);
let mut files = DirectoryContents::<TmpSource<PackSource>>::new();
let js_dir = params.javascript();
// Evaluate the project's entrypoint with node to extract the manifest JSON.
let manifest: Arc<[u8]> = Command::new("node")
.arg("-e")
.arg(format!(
"console.log(JSON.stringify(require('{}/index.js').manifest))",
js_dir.display()
))
.invoke(ErrorKind::Javascript)
.await?
.into();
files.insert(
"manifest.json".into(),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(manifest.clone()),
)),
);
// The icon keeps its original extension (icon.png, icon.svg, ...).
let icon = params.icon().await?;
let icon_ext = icon
.extension()
.or_not_found("icon file extension")?
.to_string_lossy();
files.insert(
InternedString::from_display(&lazy_format!("icon.{}", icon_ext)),
Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(icon))),
);
files.insert(
"LICENSE.md".into(),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::File(params.license().await?),
)),
);
// The javascript directory is shipped as a squashfs image.
files.insert(
"javascript.squashfs".into(),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Squashfs(Arc::new(SqfsDir::new(js_dir, tmp_dir.clone()))),
)),
);
// The archive is signed with the developer key from the CLI context.
let mut s9pk = S9pk::new(
MerkleArchive::new(files, ctx.developer_key()?.clone(), SIG_CONTEXT),
None,
)
.await?;
let manifest = s9pk.as_manifest_mut();
manifest.git_hash = Some(GitHash::from_path(params.path()).await?);
// Restrict the manifest (and each image) to the requested architectures,
// intersected with whatever the manifest already declares.
if !params.arch.is_empty() {
let arches = match manifest.hardware_requirements.arch.take() {
Some(a) => params
.arch
.iter()
.filter(|x| a.contains(*x))
.cloned()
.collect(),
None => params.arch.iter().cloned().collect(),
};
manifest
.images
.values_mut()
.for_each(|c| c.arch = c.arch.intersection(&arches).cloned().collect());
manifest.hardware_requirements.arch = Some(arches);
}
if !params.no_assets {
let assets_dir = params.assets();
s9pk.as_archive_mut().contents_mut().insert_path(
"assets.squashfs",
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Squashfs(Arc::new(SqfsDir::new(assets_dir, tmp_dir.clone()))),
)),
)?;
}
// Build / export every image declared in the manifest (ImageSource::load).
s9pk.load_images(tmp_dir.clone()).await?;
// Resolve each dependency's display metadata (title + icon) from whichever
// source the manifest specifies: inline metadata, a local .s9pk, or a URL.
// Insertions are deferred because the manifest is mutably borrowed here.
let mut to_insert = Vec::new();
for (id, dependency) in &mut s9pk.as_manifest_mut().dependencies.0 {
if let Some((title, icon)) = match dependency.metadata.take() {
Some(MetadataSrc::Metadata(metadata)) => {
// Icon may be a local path, an http(s) URL, or a data: URL.
let icon = match metadata.icon {
PathOrUrl::Path(path) => DataUrl::from_path(path).await?,
PathOrUrl::Url(url) => {
if url.scheme() == "http" || url.scheme() == "https" {
DataUrl::from_response(ctx.client.get(url).send().await?).await?
} else if url.scheme() == "data" {
url.as_str().parse()?
} else {
return Err(Error::new(
eyre!("unknown scheme: {}", url.scheme()),
ErrorKind::InvalidRequest,
));
}
}
};
Some((metadata.title, icon))
}
Some(MetadataSrc::S9pk(Some(s9pk))) => {
// Pull title and icon straight out of the dependency's package.
let s9pk = match s9pk {
PathOrUrl::Path(path) => {
S9pk::deserialize(&MultiCursorFile::from(open_file(path).await?), None)
.await?
.into_dyn()
}
PathOrUrl::Url(url) => {
if url.scheme() == "http" || url.scheme() == "https" {
S9pk::deserialize(
&Arc::new(HttpSource::new(ctx.client.clone(), url).await?),
None,
)
.await?
.into_dyn()
} else {
return Err(Error::new(
eyre!("unknown scheme: {}", url.scheme()),
ErrorKind::InvalidRequest,
));
}
}
};
Some((
s9pk.as_manifest().title.clone(),
s9pk.icon_data_url().await?,
))
}
Some(MetadataSrc::S9pk(None)) | None => {
warn!("no metadata specified for {id}, leaving metadata empty");
None
}
} {
let dep_path = Path::new("dependencies").join(id);
to_insert.push((
dep_path.join("metadata.json"),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(
IoFormat::Json.to_vec(&DependencyMetadata { title })?.into(),
),
)),
));
to_insert.push((
dep_path
.join("icon")
.with_extension(icon.canonical_ext().unwrap_or("ico")),
Entry::file(TmpSource::new(
tmp_dir.clone(),
PackSource::Buffered(icon.data.into_owned().into()),
)),
));
}
}
for (path, source) in to_insert {
s9pk.as_archive_mut()
.contents_mut()
.insert_path(path, source)?;
}
s9pk.validate_and_filter(None)?;
s9pk.serialize(
&mut create_file(params.output(&s9pk.as_manifest().id)).await?,
false,
)
.await?;
// Drop the s9pk before gc so temp files it references can be reclaimed.
drop(s9pk);
tmp_dir.gc().await?;
Ok(())
}
/// Lists every local file the `pack` command would read for this project:
/// the javascript entrypoint, icon, license, dependency metadata sources
/// (local icons or .s9pk files), the optional assets directory, and any
/// docker build inputs.
///
/// If the manifest cannot be loaded (e.g. the project hasn't been built
/// yet), falls back to just the entrypoint, icon, and license so callers
/// still get a usable minimal list.
#[instrument(skip_all)]
pub async fn list_ingredients(_: CliContext, params: PackParams) -> Result<Vec<PathBuf>, Error> {
let js_path = params.javascript().join("index.js");
// Evaluate the project's index.js with node to extract the manifest JSON.
let manifest: Manifest = match async {
serde_json::from_slice(
&Command::new("node")
.arg("-e")
.arg(format!(
"console.log(JSON.stringify(require('{}').manifest))",
js_path.display()
))
.invoke(ErrorKind::Javascript)
.await?,
)
.with_kind(ErrorKind::Deserialization)
}
.await
{
Ok(m) => m,
Err(e) => {
// Best effort: a broken manifest shouldn't prevent listing the
// statically-known ingredients.
warn!("failed to load manifest: {e}");
debug!("{e:?}");
return Ok(vec![js_path, params.icon().await?, params.license().await?]);
}
};
let mut ingredients = vec![js_path, params.icon().await?, params.license().await?];
// Dependency metadata may reference local files (icons or .s9pk archives);
// keys are unused here, so iterate values only.
for dependency in manifest.dependencies.0.into_values() {
match dependency.metadata {
Some(MetadataSrc::Metadata(crate::dependencies::Metadata {
icon: PathOrUrl::Path(icon),
..
})) => {
ingredients.push(icon);
}
Some(MetadataSrc::S9pk(Some(PathOrUrl::Path(s9pk)))) => {
ingredients.push(s9pk);
}
_ => (),
}
}
if !params.no_assets {
let assets_dir = params.assets();
ingredients.push(assets_dir);
}
// Dockerfiles (and their workdirs) for any images built from source.
for image in manifest.images.values() {
ingredients.extend(image.source.ingredients());
}
Ok(ingredients)
}

View File

@@ -0,0 +1,21 @@
use std::collections::BTreeMap;
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;
/// A tree of build recipes keyed by the relative path each one produces.
/// NOTE(review): no implementation consuming this type is visible here —
/// confirm its semantics against the code that interprets recipes.
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
pub struct DirRecipe(BTreeMap<PathBuf, Recipe>);
/// A single way of obtaining a file or directory in a recipe tree.
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
pub enum Recipe {
// NOTE(review): presumably builds via `make` at/for the given path — no
// interpreter for this variant is visible here; confirm before relying.
Make(PathBuf),
// Presumably downloads `url` and verifies it against `checksum` —
// confirm against the recipe interpreter.
Wget {
#[ts(type = "string")]
url: Url,
checksum: String,
},
/// Nested subtree of recipes.
Recipe(DirRecipe),
}

View File

@@ -0,0 +1,89 @@
## Magic
`0x3b3b`
## Version
`0x02` (varint)
## Merkle Archive
### Header
- ed25519 pubkey (32B)
- ed25519 signature of TOC sighash (64B)
- TOC sighash: (32B)
- TOC position: (8B: u64 BE)
- TOC size: (8B: u64 BE)
### TOC
- number of entries (varint)
- FOREACH section
- name (varstring)
- hash (32B: BLAKE-3 of file contents / TOC sighash)
- TYPE (1B)
- TYPE=MISSING (`0x00`)
- TYPE=FILE (`0x01`)
- position (8B: u64 BE)
- size (8B: u64 BE)
- TYPE=TOC (`0x02`)
- position (8B: u64 BE)
- size (8B: u64 BE)
#### SigHash
Hash of TOC with all contents MISSING
### FILE
`<File contents>`
# Example
`foo/bar/baz.txt`
ROOT TOC:
- 1 section
- name: foo
hash: sighash('a)
type: TOC
position: 'a
size: _
'a:
- 1 section
- name: bar
hash: sighash('b)
type: TOC
position: 'b
size: _
'b:
- 2 sections
- name: baz.txt
hash: hash('c)
type: FILE
position: 'c
size: _
- name: qux
hash: `<unverifiable>`
type: MISSING
'c: `<CONTENTS OF baz.txt>`
"foo/"
hash: _
size: 15b
"bar.txt"
hash: _
size: 5b
`<CONTENTS OF foo/>` (
"baz.txt"
hash: _
size: 2b
)
`<CONTENTS OF bar.txt>` ("hello")
`<CONTENTS OF baz.txt>` ("hi")