add documentation for AI agents (#3115)

* add documentation for AI agents

* docs: consolidate CLAUDE.md and CONTRIBUTING.md, add style guidelines

- Refactor CLAUDE.md to reference CONTRIBUTING.md for build/test/format info
- Expand CONTRIBUTING.md with comprehensive build targets, env vars, and testing
- Add code style guidelines section with conventional commits
- Standardize SDK prettier config to use single quotes (matching web)
- Add project-level Claude Code settings to disable co-author attribution

* style(sdk): apply prettier with single quotes

Run prettier across sdk/base and sdk/package to apply the
standardized quote style (single quotes matching web).

* docs: add USER.md for per-developer TODO filtering

- Add agents/USER.md to .gitignore (contains user identifier)
- Document session startup flow in CLAUDE.md:
  - Create USER.md if missing, prompting for identifier
  - Filter TODOs by @username tags
  - Offer relevant TODOs on session start

* docs: add i18n documentation task to agent TODOs

* docs: document i18n ID patterns in core/

Add agents/i18n-patterns.md covering rust-i18n setup, translation file
format, t!() macro usage, key naming conventions, and locale selection.
Remove completed TODO item and add reference in CLAUDE.md.

* chore: clarify that all builds work on any OS with Docker
This commit is contained in:
Aiden McClelland
2026-02-06 00:10:16 +01:00
committed by GitHub
parent 86ca23c093
commit f2142f0bb3
280 changed files with 6793 additions and 5515 deletions

View File

@@ -4,11 +4,11 @@ import {
Manifest,
MerkleArchiveCommitment,
PackageId,
} from "../osBindings"
import { ArrayBufferReader, MerkleArchive } from "./merkleArchive"
import mime from "mime"
import { DirectoryContents } from "./merkleArchive/directoryContents"
import { FileContents } from "./merkleArchive/fileContents"
} from '../osBindings'
import { ArrayBufferReader, MerkleArchive } from './merkleArchive'
import mime from 'mime'
import { DirectoryContents } from './merkleArchive/directoryContents'
import { FileContents } from './merkleArchive/fileContents'
const magicAndVersion = new Uint8Array([59, 59, 2])
@@ -37,12 +37,12 @@ export class S9pk {
)
const magicVersion = new Uint8Array(header.next(magicAndVersion.length))
if (!compare(magicVersion, magicAndVersion)) {
throw new Error("Invalid Magic or Unexpected Version")
throw new Error('Invalid Magic or Unexpected Version')
}
const archive = await MerkleArchive.deserialize(
source,
"s9pk",
's9pk',
header,
commitment,
)
@@ -50,7 +50,7 @@ export class S9pk {
const manifest = JSON.parse(
new TextDecoder().decode(
await archive.contents
.getPath(["manifest.json"])
.getPath(['manifest.json'])
?.verifiedFileContents(),
),
)
@@ -60,24 +60,24 @@ export class S9pk {
async icon(): Promise<DataUrl> {
const iconName = Object.keys(this.archive.contents.contents).find(
(name) =>
name.startsWith("icon.") && mime.getType(name)?.startsWith("image/"),
name.startsWith('icon.') && mime.getType(name)?.startsWith('image/'),
)
if (!iconName) {
throw new Error("no icon found in archive")
throw new Error('no icon found in archive')
}
return (
`data:${mime.getType(iconName)};base64,` +
Buffer.from(
await this.archive.contents.getPath([iconName])!.verifiedFileContents(),
).toString("base64")
).toString('base64')
)
}
async dependencyMetadataFor(id: PackageId) {
const entry = this.archive.contents.getPath([
"dependencies",
'dependencies',
id,
"metadata.json",
'metadata.json',
])
if (!entry) return null
return JSON.parse(
@@ -86,18 +86,18 @@ export class S9pk {
}
async dependencyIconFor(id: PackageId) {
const dir = this.archive.contents.getPath(["dependencies", id])
const dir = this.archive.contents.getPath(['dependencies', id])
if (!dir || !(dir.contents instanceof DirectoryContents)) return null
const iconName = Object.keys(dir.contents.contents).find(
(name) =>
name.startsWith("icon.") && mime.getType(name)?.startsWith("image/"),
name.startsWith('icon.') && mime.getType(name)?.startsWith('image/'),
)
if (!iconName) return null
return (
`data:${mime.getType(iconName)};base64,` +
Buffer.from(
await dir.contents.getPath([iconName])!.verifiedFileContents(),
).toString("base64")
).toString('base64')
)
}
@@ -120,9 +120,9 @@ export class S9pk {
}
async license(): Promise<string> {
const file = this.archive.contents.getPath(["LICENSE.md"])
const file = this.archive.contents.getPath(['LICENSE.md'])
if (!file || !(file.contents instanceof FileContents))
throw new Error("license.md not found in archive")
throw new Error('license.md not found in archive')
return new TextDecoder().decode(await file.verifiedFileContents())
}
}

View File

@@ -1,8 +1,8 @@
import { ArrayBufferReader, Entry } from "."
import { blake3 } from "@noble/hashes/blake3"
import { serializeVarint } from "./varint"
import { FileContents } from "./fileContents"
import { compare } from ".."
import { ArrayBufferReader, Entry } from '.'
import { blake3 } from '@noble/hashes/blake3'
import { serializeVarint } from './varint'
import { FileContents } from './fileContents'
import { compare } from '..'
export class DirectoryContents {
static readonly headerSize =
@@ -18,7 +18,7 @@ export class DirectoryContents {
const position = header.nextU64()
const size = header.nextU64()
if (size > maxSize) {
throw new Error("size is greater than signed")
throw new Error('size is greater than signed')
}
const tocReader = new ArrayBufferReader(
@@ -37,7 +37,7 @@ export class DirectoryContents {
const res = new DirectoryContents(entries)
if (!compare(res.sighash(), sighash)) {
throw new Error("hash sum does not match")
throw new Error('hash sum does not match')
}
return res

View File

@@ -1,6 +1,6 @@
import { blake3 } from "@noble/hashes/blake3"
import { ArrayBufferReader } from "."
import { compare } from ".."
import { blake3 } from '@noble/hashes/blake3'
import { ArrayBufferReader } from '.'
import { compare } from '..'
export class FileContents {
private constructor(readonly contents: Blob) {}
@@ -17,7 +17,7 @@ export class FileContents {
async verified(hash: Uint8Array): Promise<ArrayBuffer> {
const res = await this.contents.arrayBuffer()
if (!compare(hash, blake3(new Uint8Array(res)))) {
throw new Error("hash sum mismatch")
throw new Error('hash sum mismatch')
}
return res
}

View File

@@ -1,10 +1,10 @@
import { MerkleArchiveCommitment } from "../../osBindings"
import { DirectoryContents } from "./directoryContents"
import { FileContents } from "./fileContents"
import { ed25519ph } from "@noble/curves/ed25519"
import { sha512 } from "@noble/hashes/sha2"
import { VarIntProcessor } from "./varint"
import { compare } from ".."
import { MerkleArchiveCommitment } from '../../osBindings'
import { DirectoryContents } from './directoryContents'
import { FileContents } from './fileContents'
import { ed25519ph } from '@noble/curves/ed25519'
import { sha512 } from '@noble/hashes/sha2'
import { VarIntProcessor } from './varint'
import { compare } from '..'
const maxVarstringLen = 1024 * 1024
@@ -33,7 +33,7 @@ export class ArrayBufferReader {
}
const res = p.decode()
if (res === null) {
throw new Error("Reached EOF")
throw new Error('Reached EOF')
}
return res
}
@@ -79,24 +79,24 @@ export class MerkleArchive {
},
)
) {
throw new Error("signature verification failed")
throw new Error('signature verification failed')
}
if (commitment) {
if (
!compare(
sighash,
new Uint8Array(Buffer.from(commitment.rootSighash, "base64").buffer),
new Uint8Array(Buffer.from(commitment.rootSighash, 'base64').buffer),
)
) {
throw new Error("merkle root mismatch")
throw new Error('merkle root mismatch')
}
if (maxSize > commitment.rootMaxsize) {
throw new Error("root directory max size too large")
throw new Error('root directory max size too large')
}
} else if (maxSize > 1024 * 1024) {
throw new Error(
"root directory max size over 1MiB, cancelling download in case of DOS attack",
'root directory max size over 1MiB, cancelling download in case of DOS attack',
)
}
@@ -137,10 +137,10 @@ export class Entry {
}
async verifiedFileContents(): Promise<ArrayBuffer> {
if (!this.contents) {
throw new Error("file is missing from archive")
throw new Error('file is missing from archive')
}
if (!(this.contents instanceof FileContents)) {
throw new Error("is not a regular file")
throw new Error('is not a regular file')
}
return this.contents.verified(this.hash)
}

View File

@@ -1,4 +1,4 @@
import { asError } from "../../util"
import { asError } from '../../util'
const msb = 0x80
const dropMsb = 0x7f
@@ -13,7 +13,7 @@ export class VarIntProcessor {
}
push(b: number) {
if (this.i >= maxSize) {
throw new Error("Unterminated varint")
throw new Error('Unterminated varint')
}
this.buf[this.i] = b
this.i += 1