mirror of
https://github.com/Start9Labs/start-sdk.git
synced 2026-03-26 02:11:56 +00:00
chore: Convert to a node style
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,2 +1,3 @@
|
||||
.vscode
|
||||
lib
|
||||
dist/
|
||||
node_modules/
|
||||
|
||||
25
Makefile
25
Makefile
@@ -1,18 +1,23 @@
|
||||
TS_FILES := $(shell find ./**/*.ts )
|
||||
version = $(shell git tag --sort=committerdate | tail -1)
|
||||
test: $(TS_FILES) utils/test/output.ts
|
||||
deno test test.ts
|
||||
deno check mod.ts
|
||||
# test: $(TS_FILES) utils/test/output.ts
|
||||
# deno test test.ts
|
||||
# deno check mod.ts
|
||||
|
||||
utils/test/output.ts: utils/test/config.json scripts/oldSpecToBuilder.ts
|
||||
cat utils/test/config.json | deno run scripts/oldSpecToBuilder.ts "../../mod.ts" |deno fmt - > utils/test/output.ts
|
||||
cat utils/test/config.json | deno run scripts/oldSpecToBuilder.ts "../../mod" |deno fmt - > utils/test/output.ts
|
||||
|
||||
bundle: test fmt $(TS_FILES)
|
||||
echo "Version: $(version)"
|
||||
deno run --allow-net --allow-write --allow-env --allow-run --allow-read build.ts $(version)
|
||||
bundle: fmt $(TS_FILES) .FORCE node_modules
|
||||
rm -rf dist || true
|
||||
npx tsc-multi
|
||||
npx tsc --emitDeclarationOnly
|
||||
|
||||
fmt:
|
||||
deno fmt
|
||||
fmt: node_modules
|
||||
npx prettier --write "**/*.ts"
|
||||
|
||||
node_modules: package.json
|
||||
npm install
|
||||
|
||||
publish: bundle
|
||||
cd lib && npm publish
|
||||
cd lib && npm publish
|
||||
.FORCE:
|
||||
12
README.md
12
README.md
@@ -1,7 +1,17 @@
|
||||
<<<<<<< Updated upstream
|
||||
|
||||
# embassy-sdk-ts
|
||||
|
||||
### Generate: Config class from legacy ConfigSpec
|
||||
|
||||
```sh
|
||||
cat utils/test/config.json | deno run https://deno.land/x/embassyd_sdk/scripts/oldSpecToBuilder.ts "../../mod.ts" |deno fmt - > utils/test/output.ts
|
||||
cat utils/test/config.json | deno run https://deno.land/x/embassyd_sdk/scripts/oldSpecToBuilder.ts "../../mod" |deno fmt - > utils/test/output.ts
|
||||
```
|
||||
|
||||
=======
|
||||
|
||||
# Start SDK
|
||||
|
||||
For making the patterns that are wanted in making services for the startOS.
|
||||
|
||||
> > > > > > > Stashed changes
|
||||
|
||||
30
build.ts
30
build.ts
@@ -1,30 +0,0 @@
|
||||
// ex. scripts/build_npm.ts
|
||||
import { build, emptyDir } from "https://deno.land/x/dnt@0.33.1/mod.ts";
|
||||
|
||||
await emptyDir("./lib");
|
||||
await build({
|
||||
entryPoints: ["./mod.ts"],
|
||||
outDir: "./lib",
|
||||
shims: {
|
||||
// see JS docs for overview and more options
|
||||
deno: true,
|
||||
},
|
||||
package: {
|
||||
// package.json properties
|
||||
name: "embassy-sdk-ts",
|
||||
version: Deno.args[0],
|
||||
description: "Sdk that is used by the embassy packages, and the OS.",
|
||||
license: "MIT",
|
||||
sideEffects: false,
|
||||
repository: {
|
||||
type: "git",
|
||||
url: "git+https://github.com/Start9Labs/embassy-sdk-ts.git",
|
||||
},
|
||||
bugs: {
|
||||
url: "https://github.com/Start9Labs/embassy-sdk-ts/issues",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// post build steps
|
||||
Deno.copyFileSync("./README.md", "lib/README.md");
|
||||
@@ -1,4 +0,0 @@
|
||||
export { noPropertiesFound, properties, propertiesv2 } from "./properties.ts";
|
||||
export { setConfig } from "./setConfig.ts";
|
||||
export { getConfig, getConfigAndMatcher } from "./getConfig.ts";
|
||||
export * as migrations from "./migrations.ts";
|
||||
@@ -1,48 +0,0 @@
|
||||
import { Config } from "./config.ts";
|
||||
import { Value } from "./value.ts";
|
||||
import { expect } from "https://deno.land/x/expect@v0.2.9/mod.ts";
|
||||
const { test } = Deno;
|
||||
|
||||
test("String", () => {
|
||||
const bitcoinPropertiesBuilt: {
|
||||
"peer-tor-address": {
|
||||
name: string;
|
||||
description: string | null;
|
||||
type: "string";
|
||||
};
|
||||
} = Config.of(
|
||||
{
|
||||
"peer-tor-address": Value.string({
|
||||
name: "Peer tor address",
|
||||
default: "",
|
||||
description: "The Tor address of the peer interface",
|
||||
warning: null,
|
||||
nullable: false,
|
||||
masked: true,
|
||||
placeholder: null,
|
||||
pattern: null,
|
||||
"pattern-description": null,
|
||||
textarea: null,
|
||||
}),
|
||||
},
|
||||
).build();
|
||||
expect(JSON.stringify(bitcoinPropertiesBuilt)).toEqual(
|
||||
/*json*/ `{
|
||||
"peer-tor-address": {
|
||||
"type": "string",
|
||||
"name": "Peer tor address",
|
||||
"default": "",
|
||||
"description": "The Tor address of the peer interface",
|
||||
"warning": null,
|
||||
"nullable": false,
|
||||
"masked": true,
|
||||
"placeholder": null,
|
||||
"pattern": null,
|
||||
"pattern-description": null,
|
||||
"textarea": null
|
||||
}}`
|
||||
.replaceAll("\n", " ")
|
||||
.replaceAll(/\s{2,}/g, "")
|
||||
.replaceAll(": ", ":"),
|
||||
);
|
||||
});
|
||||
@@ -1,13 +0,0 @@
|
||||
import { Config } from "./config.ts";
|
||||
import { List } from "./list.ts";
|
||||
import { Value } from "./value.ts";
|
||||
import { Variants } from "./variants.ts";
|
||||
|
||||
export {
|
||||
/** @typedef { import("./config.ts").Config } Pet
|
||||
*/
|
||||
Config,
|
||||
List,
|
||||
Value,
|
||||
Variants,
|
||||
};
|
||||
@@ -1,3 +0,0 @@
|
||||
export * as matches from "https://deno.land/x/ts_matches@v5.4.1/mod.ts";
|
||||
export * as YAML from "https://deno.land/std@0.177.0/encoding/yaml.ts";
|
||||
export * as TOML from "https://deno.land/std@0.177.0/encoding/toml.ts";
|
||||
@@ -1,244 +0,0 @@
|
||||
import { expect } from "https://deno.land/x/expect@v0.2.9/mod.ts";
|
||||
import { EmVer, notRange, rangeAnd, rangeOf, rangeOr } from "./mod.ts";
|
||||
const { test } = Deno;
|
||||
|
||||
{
|
||||
const checker = rangeOf("*");
|
||||
test("rangeOf('*')", () => {
|
||||
expect(checker.check("1")).toBe(true);
|
||||
expect(checker.check("1.2")).toBe(true);
|
||||
expect(checker.check("1.2.3.4")).toBe(true);
|
||||
});
|
||||
test("rangeOf('*') invalid", () => {
|
||||
expect(() => checker.check("a")).toThrow();
|
||||
expect(() => checker.check("")).toThrow();
|
||||
expect(() => checker.check("1..3")).toThrow();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const checker = rangeOf(">1.2.3.4");
|
||||
test(`rangeOf(">1.2.3.4") valid`, () => {
|
||||
expect(checker.check("2")).toBe(true);
|
||||
expect(checker.check("1.2.3.5")).toBe(true);
|
||||
expect(checker.check("1.2.3.4.1")).toBe(true);
|
||||
});
|
||||
|
||||
test(`rangeOf(">1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("1.2.3.4")).toBe(false);
|
||||
expect(checker.check("1.2.3")).toBe(false);
|
||||
expect(checker.check("1")).toBe(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("=1.2.3");
|
||||
test(`rangeOf("=1.2.3") valid`, () => {
|
||||
expect(checker.check("1.2.3")).toBe(true);
|
||||
});
|
||||
|
||||
test(`rangeOf("=1.2.3") invalid`, () => {
|
||||
expect(checker.check("2")).toBe(false);
|
||||
expect(checker.check("1.2.3.1")).toBe(false);
|
||||
expect(checker.check("1.2")).toBe(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf(">=1.2.3.4");
|
||||
test(`rangeOf(">=1.2.3.4") valid`, () => {
|
||||
expect(checker.check("2")).toBe(true);
|
||||
expect(checker.check("1.2.3.5")).toBe(true);
|
||||
expect(checker.check("1.2.3.4.1")).toBe(true);
|
||||
expect(checker.check("1.2.3.4")).toBe(true);
|
||||
});
|
||||
|
||||
test(`rangeOf(">=1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("1.2.3")).toBe(false);
|
||||
expect(checker.check("1")).toBe(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("<1.2.3.4");
|
||||
test(`rangeOf("<1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("2")).toBe(false);
|
||||
expect(checker.check("1.2.3.5")).toBe(false);
|
||||
expect(checker.check("1.2.3.4.1")).toBe(false);
|
||||
expect(checker.check("1.2.3.4")).toBe(false);
|
||||
});
|
||||
|
||||
test(`rangeOf("<1.2.3.4") valid`, () => {
|
||||
expect(checker.check("1.2.3")).toBe(true);
|
||||
expect(checker.check("1")).toBe(true);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("<=1.2.3.4");
|
||||
test(`rangeOf("<=1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("2")).toBe(false);
|
||||
expect(checker.check("1.2.3.5")).toBe(false);
|
||||
expect(checker.check("1.2.3.4.1")).toBe(false);
|
||||
});
|
||||
|
||||
test(`rangeOf("<=1.2.3.4") valid`, () => {
|
||||
expect(checker.check("1.2.3")).toBe(true);
|
||||
expect(checker.check("1")).toBe(true);
|
||||
expect(checker.check("1.2.3.4")).toBe(true);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const checkA = rangeOf(">1");
|
||||
const checkB = rangeOf("<=2");
|
||||
|
||||
const checker = rangeAnd(checkA, checkB);
|
||||
test(`simple and(checkers) valid`, () => {
|
||||
expect(checker.check("2")).toBe(true);
|
||||
|
||||
expect(checker.check("1.1")).toBe(true);
|
||||
});
|
||||
test(`simple and(checkers) invalid`, () => {
|
||||
expect(checker.check("2.1")).toBe(false);
|
||||
expect(checker.check("1")).toBe(false);
|
||||
expect(checker.check("0")).toBe(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checkA = rangeOf("<1");
|
||||
const checkB = rangeOf("=2");
|
||||
|
||||
const checker = rangeOr(checkA, checkB);
|
||||
test(`simple or(checkers) valid`, () => {
|
||||
expect(checker.check("2")).toBe(true);
|
||||
expect(checker.check("0.1")).toBe(true);
|
||||
});
|
||||
test(`simple or(checkers) invalid`, () => {
|
||||
expect(checker.check("2.1")).toBe(false);
|
||||
expect(checker.check("1")).toBe(false);
|
||||
expect(checker.check("1.1")).toBe(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("1.2.*");
|
||||
test(`rangeOf(1.2.*) valid`, () => {
|
||||
expect(checker.check("1.2")).toBe(true);
|
||||
expect(checker.check("1.2.1")).toBe(true);
|
||||
});
|
||||
test(`rangeOf(1.2.*) invalid`, () => {
|
||||
expect(checker.check("1.3")).toBe(false);
|
||||
expect(checker.check("1.3.1")).toBe(false);
|
||||
|
||||
expect(checker.check("1.1.1")).toBe(false);
|
||||
expect(checker.check("1.1")).toBe(false);
|
||||
expect(checker.check("1")).toBe(false);
|
||||
|
||||
expect(checker.check("2")).toBe(false);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const checker = notRange(rangeOf("1.2.*"));
|
||||
test(`notRange(rangeOf(1.2.*)) valid`, () => {
|
||||
expect(checker.check("1.3")).toBe(true);
|
||||
expect(checker.check("1.3.1")).toBe(true);
|
||||
|
||||
expect(checker.check("1.1.1")).toBe(true);
|
||||
expect(checker.check("1.1")).toBe(true);
|
||||
expect(checker.check("1")).toBe(true);
|
||||
|
||||
expect(checker.check("2")).toBe(true);
|
||||
});
|
||||
test(`notRange(rangeOf(1.2.*)) invalid `, () => {
|
||||
expect(checker.check("1.2")).toBe(false);
|
||||
expect(checker.check("1.2.1")).toBe(false);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const checker = rangeOf("!1.2.*");
|
||||
test(`!(rangeOf(1.2.*)) valid`, () => {
|
||||
expect(checker.check("1.3")).toBe(true);
|
||||
expect(checker.check("1.3.1")).toBe(true);
|
||||
|
||||
expect(checker.check("1.1.1")).toBe(true);
|
||||
expect(checker.check("1.1")).toBe(true);
|
||||
expect(checker.check("1")).toBe(true);
|
||||
|
||||
expect(checker.check("2")).toBe(true);
|
||||
});
|
||||
test(`!(rangeOf(1.2.*)) invalid `, () => {
|
||||
expect(checker.check("1.2")).toBe(false);
|
||||
expect(checker.check("1.2.1")).toBe(false);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
test(`no and ranges`, () => {
|
||||
expect(() => rangeAnd()).toThrow();
|
||||
});
|
||||
test(`no or ranges`, () => {
|
||||
expect(() => rangeOr()).toThrow();
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("!>1.2.3.4");
|
||||
test(`rangeOf("!>1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("2")).toBe(false);
|
||||
expect(checker.check("1.2.3.5")).toBe(false);
|
||||
expect(checker.check("1.2.3.4.1")).toBe(false);
|
||||
});
|
||||
|
||||
test(`rangeOf("!>1.2.3.4") valid`, () => {
|
||||
expect(checker.check("1.2.3.4")).toBe(true);
|
||||
expect(checker.check("1.2.3")).toBe(true);
|
||||
expect(checker.check("1")).toBe(true);
|
||||
});
|
||||
}
|
||||
|
||||
test(">1 && =1.2", () => {
|
||||
const checker = rangeOf(">1 && =1.2");
|
||||
|
||||
expect(checker.check("1.2")).toBe(true);
|
||||
expect(checker.check("1.2.1")).toBe(false);
|
||||
});
|
||||
test("=1 || =2", () => {
|
||||
const checker = rangeOf("=1 || =2");
|
||||
|
||||
expect(checker.check("1")).toBe(true);
|
||||
expect(checker.check("2")).toBe(true);
|
||||
expect(checker.check("3")).toBe(false);
|
||||
});
|
||||
|
||||
test(">1 && =1.2 || =2", () => {
|
||||
const checker = rangeOf(">1 && =1.2 || =2");
|
||||
|
||||
expect(checker.check("1.2")).toBe(true);
|
||||
expect(checker.check("1")).toBe(false);
|
||||
expect(checker.check("2")).toBe(true);
|
||||
expect(checker.check("3")).toBe(false);
|
||||
});
|
||||
|
||||
test("&& before || order of operationns: <1.5 && >1 || >1.5 && <3", () => {
|
||||
const checker = rangeOf("<1.5 && >1 || >1.5 && <3");
|
||||
expect(checker.check("1.1")).toBe(true);
|
||||
expect(checker.check("2")).toBe(true);
|
||||
|
||||
expect(checker.check("1.5")).toBe(false);
|
||||
expect(checker.check("1")).toBe(false);
|
||||
expect(checker.check("3")).toBe(false);
|
||||
});
|
||||
|
||||
test("Compare function on the emver", () => {
|
||||
const a = EmVer.from("1.2.3");
|
||||
const b = EmVer.from("1.2.4");
|
||||
|
||||
expect(a.compare(b) === "less");
|
||||
expect(b.compare(a) === "greater");
|
||||
expect(a.compare(a) === "equal");
|
||||
});
|
||||
test("Compare for sort function on the emver", () => {
|
||||
const a = EmVer.from("1.2.3");
|
||||
const b = EmVer.from("1.2.4");
|
||||
|
||||
expect(a.compareForSort(b) === -1);
|
||||
expect(b.compareForSort(a) === 1);
|
||||
expect(a.compareForSort(a) === 0);
|
||||
});
|
||||
@@ -1,48 +0,0 @@
|
||||
import { Effects, ResultType } from "./types.ts";
|
||||
import { error, errorCode, isKnownError, ok } from "./util.ts";
|
||||
export const checkWebUrl: (
|
||||
url: string,
|
||||
) => (effects: Effects, duration: number) => Promise<ResultType<null | void>> =
|
||||
(url) => {
|
||||
return async (effects, duration) => {
|
||||
let errorValue;
|
||||
if (
|
||||
// deno-lint-ignore no-cond-assign
|
||||
errorValue = guardDurationAboveMinimum({ duration, minimumTime: 5000 })
|
||||
) return errorValue;
|
||||
|
||||
return await effects.fetch(url)
|
||||
.then((_) => ok)
|
||||
.catch((e) => {
|
||||
effects.warn(`Error while fetching URL: ${url}`);
|
||||
effects.error(JSON.stringify(e));
|
||||
effects.error(e.toString());
|
||||
return error(`Error while fetching URL: ${url}`);
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
export const runHealthScript =
|
||||
({ command, args }: { command: string; args: string[] }) =>
|
||||
async (
|
||||
effects: Effects,
|
||||
_duration: number,
|
||||
): Promise<ResultType<null | void>> => {
|
||||
const res = await effects.runCommand({ command, args });
|
||||
if ("result" in res) {
|
||||
return { result: null };
|
||||
} else {
|
||||
return res;
|
||||
}
|
||||
};
|
||||
|
||||
// Ensure the starting duration is pass a minimum
|
||||
export const guardDurationAboveMinimum = (
|
||||
input: { duration: number; minimumTime: number },
|
||||
) => (input.duration <= input.minimumTime) ? errorCode(60, "Starting") : null;
|
||||
|
||||
export const catchError = (effects: Effects) => (e: unknown) => {
|
||||
if (isKnownError(e)) return e;
|
||||
effects.error(`Health check failed: ${e}`);
|
||||
return error("Error while running health check");
|
||||
};
|
||||
6
jest.config.js
Normal file
6
jest.config.js
Normal file
@@ -0,0 +1,6 @@
|
||||
/** @type {import('ts-jest').JestConfigWithTsJest} */
|
||||
module.exports = {
|
||||
preset: "ts-jest",
|
||||
automock: false,
|
||||
testEnvironment: "node",
|
||||
};
|
||||
@@ -1,43 +0,0 @@
|
||||
src/
|
||||
esm/config/index.test.js
|
||||
script/config/index.test.js
|
||||
types/config/index.test.d.ts
|
||||
esm/emver-lite/test.js
|
||||
script/emver-lite/test.js
|
||||
types/emver-lite/test.d.ts
|
||||
esm/test.js
|
||||
script/test.js
|
||||
types/test.d.ts
|
||||
esm/utils/propertiesMatcher.test.js
|
||||
script/utils/propertiesMatcher.test.js
|
||||
types/utils/propertiesMatcher.test.d.ts
|
||||
esm/utils/test/output.js
|
||||
script/utils/test/output.js
|
||||
types/utils/test/output.d.ts
|
||||
esm/deps/deno.land/std@0.97.0/fmt/colors.js
|
||||
script/deps/deno.land/std@0.97.0/fmt/colors.js
|
||||
types/deps/deno.land/std@0.97.0/fmt/colors.d.ts
|
||||
esm/deps/deno.land/std@0.97.0/testing/_diff.js
|
||||
script/deps/deno.land/std@0.97.0/testing/_diff.js
|
||||
types/deps/deno.land/std@0.97.0/testing/_diff.d.ts
|
||||
esm/deps/deno.land/std@0.97.0/testing/asserts.js
|
||||
script/deps/deno.land/std@0.97.0/testing/asserts.js
|
||||
types/deps/deno.land/std@0.97.0/testing/asserts.d.ts
|
||||
esm/deps/deno.land/x/expect@v0.2.9/expect.js
|
||||
script/deps/deno.land/x/expect@v0.2.9/expect.js
|
||||
types/deps/deno.land/x/expect@v0.2.9/expect.d.ts
|
||||
esm/deps/deno.land/x/expect@v0.2.9/matchers.js
|
||||
script/deps/deno.land/x/expect@v0.2.9/matchers.js
|
||||
types/deps/deno.land/x/expect@v0.2.9/matchers.d.ts
|
||||
esm/deps/deno.land/x/expect@v0.2.9/mock.js
|
||||
script/deps/deno.land/x/expect@v0.2.9/mock.js
|
||||
types/deps/deno.land/x/expect@v0.2.9/mock.d.ts
|
||||
esm/deps/deno.land/x/expect@v0.2.9/mod.js
|
||||
script/deps/deno.land/x/expect@v0.2.9/mod.js
|
||||
types/deps/deno.land/x/expect@v0.2.9/mod.d.ts
|
||||
esm/_dnt.test_shims.js
|
||||
script/_dnt.test_shims.js
|
||||
types/_dnt.test_shims.d.ts
|
||||
test_runner.js
|
||||
yarn.lock
|
||||
pnpm-lock.yaml
|
||||
@@ -1,7 +0,0 @@
|
||||
# embassy-sdk-ts
|
||||
|
||||
### Generate: Config class from legacy ConfigSpec
|
||||
|
||||
```sh
|
||||
cat utils/test/config.json | deno run scripts/oldSpecToBuilder.ts "../../config/mod.ts" |deno fmt - > utils/test/output.ts
|
||||
```
|
||||
@@ -1,5 +1,5 @@
|
||||
import { ok } from "./util.ts";
|
||||
import * as T from "./types.ts";
|
||||
import { ok } from "./util";
|
||||
import * as T from "./types";
|
||||
|
||||
export const DEFAULT_OPTIONS: T.BackupOptions = {
|
||||
delete: true,
|
||||
@@ -41,20 +41,19 @@ export class Backups {
|
||||
|
||||
constructor(
|
||||
private options = DEFAULT_OPTIONS,
|
||||
private backupSet = [] as BackupSet[],
|
||||
) {
|
||||
}
|
||||
private backupSet = [] as BackupSet[]
|
||||
) {}
|
||||
static volumes(...volumeNames: string[]) {
|
||||
return new Backups().addSets(...volumeNames.map((srcVolume) => ({
|
||||
srcVolume,
|
||||
srcPath: "./",
|
||||
dstPath: `./${srcVolume}/`,
|
||||
dstVolume: Backups.BACKUP,
|
||||
})));
|
||||
return new Backups().addSets(
|
||||
...volumeNames.map((srcVolume) => ({
|
||||
srcVolume,
|
||||
srcPath: "./",
|
||||
dstPath: `./${srcVolume}/`,
|
||||
dstVolume: Backups.BACKUP,
|
||||
}))
|
||||
);
|
||||
}
|
||||
static addSets(
|
||||
...options: BackupSet[]
|
||||
) {
|
||||
static addSets(...options: BackupSet[]) {
|
||||
return new Backups().addSets(...options);
|
||||
}
|
||||
static with_options(options?: Partial<T.BackupOptions>) {
|
||||
@@ -68,50 +67,56 @@ export class Backups {
|
||||
return this;
|
||||
}
|
||||
volumes(...volumeNames: string[]) {
|
||||
return this.addSets(...volumeNames.map((srcVolume) => ({
|
||||
srcVolume,
|
||||
srcPath: "./",
|
||||
dstPath: `./${srcVolume}/`,
|
||||
dstVolume: Backups.BACKUP,
|
||||
})));
|
||||
return this.addSets(
|
||||
...volumeNames.map((srcVolume) => ({
|
||||
srcVolume,
|
||||
srcPath: "./",
|
||||
dstPath: `./${srcVolume}/`,
|
||||
dstVolume: Backups.BACKUP,
|
||||
}))
|
||||
);
|
||||
}
|
||||
addSets(
|
||||
...options: BackupSet[]
|
||||
) {
|
||||
addSets(...options: BackupSet[]) {
|
||||
options.forEach((x) =>
|
||||
this.backupSet.push({ ...x, options: { ...this.options, ...x.options } })
|
||||
);
|
||||
return this;
|
||||
}
|
||||
build() {
|
||||
const createBackup: T.ExpectedExports.createBackup = async (
|
||||
{ effects },
|
||||
) => {
|
||||
const previousItems = (await effects.readDir({
|
||||
volumeId: Backups.BACKUP,
|
||||
path: ".",
|
||||
}).catch(() => [])).map((x) => `${x}`);
|
||||
const backupPaths = this.backupSet.filter((x) =>
|
||||
x.dstVolume === Backups.BACKUP
|
||||
).map((x) => x.dstPath).map((x) => x.replace(/\.\/([^]*)\//, "$1"));
|
||||
const filteredItems = previousItems.filter((x) =>
|
||||
backupPaths.indexOf(x) === -1
|
||||
const createBackup: T.ExpectedExports.createBackup = async ({
|
||||
effects,
|
||||
}) => {
|
||||
const previousItems = (
|
||||
await effects
|
||||
.readDir({
|
||||
volumeId: Backups.BACKUP,
|
||||
path: ".",
|
||||
})
|
||||
.catch(() => [])
|
||||
).map((x) => `${x}`);
|
||||
const backupPaths = this.backupSet
|
||||
.filter((x) => x.dstVolume === Backups.BACKUP)
|
||||
.map((x) => x.dstPath)
|
||||
.map((x) => x.replace(/\.\/([^]*)\//, "$1"));
|
||||
const filteredItems = previousItems.filter(
|
||||
(x) => backupPaths.indexOf(x) === -1
|
||||
);
|
||||
for (
|
||||
const itemToRemove of filteredItems
|
||||
) {
|
||||
for (const itemToRemove of filteredItems) {
|
||||
effects.error(`Trying to remove ${itemToRemove}`);
|
||||
await effects.removeDir({
|
||||
volumeId: Backups.BACKUP,
|
||||
path: itemToRemove,
|
||||
}).catch(() =>
|
||||
effects.removeFile({
|
||||
await effects
|
||||
.removeDir({
|
||||
volumeId: Backups.BACKUP,
|
||||
path: itemToRemove,
|
||||
})
|
||||
).catch(() => {
|
||||
effects.warn(`Failed to remove ${itemToRemove} from backup volume`);
|
||||
});
|
||||
.catch(() =>
|
||||
effects.removeFile({
|
||||
volumeId: Backups.BACKUP,
|
||||
path: itemToRemove,
|
||||
})
|
||||
)
|
||||
.catch(() => {
|
||||
effects.warn(`Failed to remove ${itemToRemove} from backup volume`);
|
||||
});
|
||||
}
|
||||
for (const item of this.backupSet) {
|
||||
if (notEmptyPath(item.dstPath)) {
|
||||
@@ -120,19 +125,21 @@ export class Backups {
|
||||
path: item.dstPath,
|
||||
});
|
||||
}
|
||||
await effects.runRsync({
|
||||
...item,
|
||||
options: {
|
||||
...this.options,
|
||||
...item.options,
|
||||
},
|
||||
}).wait();
|
||||
await effects
|
||||
.runRsync({
|
||||
...item,
|
||||
options: {
|
||||
...this.options,
|
||||
...item.options,
|
||||
},
|
||||
})
|
||||
.wait();
|
||||
}
|
||||
return ok;
|
||||
};
|
||||
const restoreBackup: T.ExpectedExports.restoreBackup = async (
|
||||
{ effects },
|
||||
) => {
|
||||
const restoreBackup: T.ExpectedExports.restoreBackup = async ({
|
||||
effects,
|
||||
}) => {
|
||||
for (const item of this.backupSet) {
|
||||
if (notEmptyPath(item.srcPath)) {
|
||||
await effects.createDir({
|
||||
@@ -140,16 +147,18 @@ export class Backups {
|
||||
path: item.srcPath,
|
||||
});
|
||||
}
|
||||
await effects.runRsync({
|
||||
options: {
|
||||
...this.options,
|
||||
...item.options,
|
||||
},
|
||||
srcVolume: item.dstVolume,
|
||||
dstVolume: item.srcVolume,
|
||||
srcPath: item.dstPath,
|
||||
dstPath: item.srcPath,
|
||||
}).wait();
|
||||
await effects
|
||||
.runRsync({
|
||||
options: {
|
||||
...this.options,
|
||||
...item.options,
|
||||
},
|
||||
srcVolume: item.dstVolume,
|
||||
dstVolume: item.srcVolume,
|
||||
srcPath: item.dstPath,
|
||||
dstPath: item.srcPath,
|
||||
})
|
||||
.wait();
|
||||
}
|
||||
return ok;
|
||||
};
|
||||
@@ -1,9 +1,9 @@
|
||||
import { Config } from "../config_builder/config.ts";
|
||||
import { YAML } from "../dependencies.ts";
|
||||
import { matches } from "../dependencies.ts";
|
||||
import { LegacyExpectedExports as ExpectedExports } from "../types.ts";
|
||||
import { ConfigSpec } from "../types/config-types.ts";
|
||||
import { TypeFromProps, typeFromProps } from "../utils/propertiesMatcher.ts";
|
||||
import { Config } from "../config_builder/config";
|
||||
import * as matches from "ts-matches";
|
||||
import * as YAML from "yaml";
|
||||
import { LegacyExpectedExports as ExpectedExports } from "../types";
|
||||
import { ConfigSpec } from "../types/config-types";
|
||||
import { TypeFromProps, typeFromProps } from "../utils/propertiesMatcher";
|
||||
|
||||
const { any, string, dictionary } = matches;
|
||||
|
||||
@@ -49,10 +49,10 @@ export const getConfig =
|
||||
* @returns A funnction for getConfig and the matcher for the spec sent in
|
||||
*/
|
||||
export const getConfigAndMatcher = <Spec extends ConfigSpec>(
|
||||
spec: Config<Spec>,
|
||||
spec: Config<Spec>
|
||||
): [
|
||||
ExpectedExports.getConfig,
|
||||
matches.Parser<unknown, TypeFromProps<Spec>>,
|
||||
matches.Parser<unknown, TypeFromProps<Spec>>
|
||||
] => {
|
||||
const specBuilt: Spec = spec.build();
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import { getConfig, setConfig } from "./mod.ts";
|
||||
import * as T from "../types.ts";
|
||||
import { getConfig, setConfig } from "./mod";
|
||||
import * as T from "../types";
|
||||
|
||||
import { LegacyExpectedExports as ExpectedExports } from "../types.ts";
|
||||
import * as M from "../migrations.ts";
|
||||
import * as util from "../util.ts";
|
||||
import { EmVer } from "../emver-lite/mod.ts";
|
||||
import { ConfigSpec } from "../types/config-types.ts";
|
||||
import { Config } from "../config_builder/mod.ts";
|
||||
import { LegacyExpectedExports as ExpectedExports } from "../types";
|
||||
import * as M from "../migrations";
|
||||
import * as util from "../util";
|
||||
import { EmVer } from "../emver-lite/mod";
|
||||
import { ConfigSpec } from "../types/config-types";
|
||||
import { Config } from "../config_builder/mod";
|
||||
|
||||
export interface NoRepeat<version extends string, type extends "up" | "down"> {
|
||||
version: version;
|
||||
@@ -22,20 +22,21 @@ export interface NoRepeat<version extends string, type extends "up" | "down"> {
|
||||
*/
|
||||
export function updateConfig<
|
||||
version extends string,
|
||||
type extends "up" | "down",
|
||||
type extends "up" | "down"
|
||||
>(
|
||||
fn: (
|
||||
config: Record<string, unknown>,
|
||||
effects: T.Effects,
|
||||
effects: T.Effects
|
||||
) => ConfigSpec | Promise<ConfigSpec>,
|
||||
configured: boolean,
|
||||
noRepeat?: NoRepeat<version, type>,
|
||||
noFail = false,
|
||||
noFail = false
|
||||
): M.MigrationFn<version, type> {
|
||||
return M.migrationFn(async (effects: T.Effects) => {
|
||||
await noRepeatGuard(effects, noRepeat, async () => {
|
||||
let config =
|
||||
util.unwrapResultType(await getConfig(Config.of({}))(effects)).config;
|
||||
let config = util.unwrapResultType(
|
||||
await getConfig(Config.of({}))(effects)
|
||||
).config;
|
||||
if (config) {
|
||||
try {
|
||||
config = await fn(config, effects);
|
||||
@@ -55,11 +56,11 @@ export function updateConfig<
|
||||
|
||||
export async function noRepeatGuard<
|
||||
version extends string,
|
||||
type extends "up" | "down",
|
||||
type extends "up" | "down"
|
||||
>(
|
||||
effects: T.Effects,
|
||||
noRepeat: NoRepeat<version, type> | undefined,
|
||||
fn: () => Promise<void>,
|
||||
fn: () => Promise<void>
|
||||
): Promise<void> {
|
||||
if (!noRepeat) {
|
||||
return fn();
|
||||
@@ -92,7 +93,7 @@ export async function noRepeatGuard<
|
||||
export async function initNoRepeat<versions extends string>(
|
||||
effects: T.Effects,
|
||||
migrations: M.MigrationMapping<versions>,
|
||||
startingVersion: string,
|
||||
startingVersion: string
|
||||
) {
|
||||
if (
|
||||
!(await util.exists(effects, {
|
||||
@@ -117,14 +118,14 @@ export async function initNoRepeat<versions extends string>(
|
||||
|
||||
export function fromMapping<versions extends string>(
|
||||
migrations: M.MigrationMapping<versions>,
|
||||
currentVersion: string,
|
||||
currentVersion: string
|
||||
): ExpectedExports.migration {
|
||||
const inner = M.fromMapping(migrations, currentVersion);
|
||||
return async (effects: T.Effects, version: string, direction?: unknown) => {
|
||||
await initNoRepeat(
|
||||
effects,
|
||||
migrations,
|
||||
direction === "from" ? version : currentVersion,
|
||||
direction === "from" ? version : currentVersion
|
||||
);
|
||||
return inner(effects, version, direction);
|
||||
};
|
||||
4
lib/compat/mod.ts
Normal file
4
lib/compat/mod.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export { noPropertiesFound, properties, propertiesv2 } from "./properties";
|
||||
export { setConfig } from "./setConfig";
|
||||
export { getConfig, getConfigAndMatcher } from "./getConfig";
|
||||
export * as migrations from "./migrations";
|
||||
@@ -1,12 +1,12 @@
|
||||
import { YAML } from "../dependencies.ts";
|
||||
import { exists } from "../util.ts";
|
||||
import * as YAML from "yaml";
|
||||
import { exists } from "../util";
|
||||
import {
|
||||
Effects,
|
||||
ExpectedExports,
|
||||
LegacyExpectedExports,
|
||||
Properties,
|
||||
ResultType,
|
||||
} from "../types.ts";
|
||||
} from "../types";
|
||||
|
||||
// deno-lint-ignore no-explicit-any
|
||||
const asResult = (result: any) => ({ result: result as Properties });
|
||||
@@ -33,18 +33,21 @@ export const noPropertiesFound: ResultType<Properties> = {
|
||||
* @returns
|
||||
*/
|
||||
export const properties: LegacyExpectedExports.properties = async (
|
||||
effects: Effects,
|
||||
effects: Effects
|
||||
) => {
|
||||
if (
|
||||
await exists(effects, { path: "start9/stats.yaml", volumeId: "main" }) ===
|
||||
false
|
||||
(await exists(effects, { path: "start9/stats.yaml", volumeId: "main" })) ===
|
||||
false
|
||||
) {
|
||||
return noPropertiesFound;
|
||||
}
|
||||
return await effects.readFile({
|
||||
path: "start9/stats.yaml",
|
||||
volumeId: "main",
|
||||
}).then(YAML.parse).then(asResult);
|
||||
return await effects
|
||||
.readFile({
|
||||
path: "start9/stats.yaml",
|
||||
volumeId: "main",
|
||||
})
|
||||
.then(YAML.parse)
|
||||
.then(asResult);
|
||||
};
|
||||
/**
|
||||
* Default will pull from a file (start9/stats.yaml) expected to be made on the main volume
|
||||
@@ -53,17 +56,18 @@ export const properties: LegacyExpectedExports.properties = async (
|
||||
* @param effects
|
||||
* @returns
|
||||
*/
|
||||
export const propertiesv2: ExpectedExports.properties = async (
|
||||
{ effects },
|
||||
) => {
|
||||
export const propertiesv2: ExpectedExports.properties = async ({ effects }) => {
|
||||
if (
|
||||
await exists(effects, { path: "start9/stats.yaml", volumeId: "main" }) ===
|
||||
false
|
||||
(await exists(effects, { path: "start9/stats.yaml", volumeId: "main" })) ===
|
||||
false
|
||||
) {
|
||||
return noPropertiesFound;
|
||||
}
|
||||
return await effects.readFile({
|
||||
path: "start9/stats.yaml",
|
||||
volumeId: "main",
|
||||
}).then(YAML.parse).then(asResult);
|
||||
return await effects
|
||||
.readFile({
|
||||
path: "start9/stats.yaml",
|
||||
volumeId: "main",
|
||||
})
|
||||
.then(YAML.parse)
|
||||
.then(asResult);
|
||||
};
|
||||
@@ -1,10 +1,10 @@
|
||||
import { YAML } from "../dependencies.ts";
|
||||
import * as YAML from "yaml";
|
||||
import {
|
||||
DependsOn,
|
||||
Effects,
|
||||
LegacyExpectedExports as ExpectedExports,
|
||||
} from "../types.ts";
|
||||
import { okOf } from "../util.ts";
|
||||
} from "../types";
|
||||
import { okOf } from "../util";
|
||||
|
||||
/**
|
||||
* Will set the config to the default start9/config.yaml
|
||||
@@ -17,7 +17,7 @@ import { okOf } from "../util.ts";
|
||||
export const setConfig = async (
|
||||
effects: Effects,
|
||||
newConfig: Record<string, unknown>,
|
||||
dependsOn: DependsOn = {},
|
||||
dependsOn: DependsOn = {}
|
||||
) => {
|
||||
await effects.createDir({
|
||||
path: "start9",
|
||||
@@ -29,12 +29,10 @@ export const setConfig = async (
|
||||
volumeId: "main",
|
||||
});
|
||||
|
||||
return okOf(
|
||||
{
|
||||
signal: "SIGTERM",
|
||||
"depends-on": dependsOn,
|
||||
} as const,
|
||||
);
|
||||
return okOf({
|
||||
signal: "SIGTERM",
|
||||
"depends-on": dependsOn,
|
||||
} as const);
|
||||
};
|
||||
|
||||
const _typeConversionCheck: ExpectedExports.setConfig = setConfig;
|
||||
@@ -1,7 +1,7 @@
|
||||
import { ConfigSpec, ValueSpec } from "../types/config-types.ts";
|
||||
import { typeFromProps } from "../util.ts";
|
||||
import { BuilderExtract, IBuilder } from "./builder.ts";
|
||||
import { Value } from "./value.ts";
|
||||
import { ConfigSpec, ValueSpec } from "../types/config-types";
|
||||
import { typeFromProps } from "../util";
|
||||
import { BuilderExtract, IBuilder } from "./builder";
|
||||
import { Value } from "./value";
|
||||
|
||||
/**
|
||||
* Configs are the specs that are used by the os configuration form for this service.
|
||||
@@ -17,7 +17,7 @@ import { Value } from "./value.ts";
|
||||
});
|
||||
```
|
||||
|
||||
The idea of a config is that now the form is going to ask for
|
||||
The idea of a config is that now the form is going to ask for
|
||||
Test: [ ] and the value is going to be checked as a boolean.
|
||||
There are more complex values like enums, lists, and objects. See {@link Value}
|
||||
|
||||
@@ -487,10 +487,16 @@ export class Config<A extends ConfigSpec> extends IBuilder<A> {
|
||||
static empty() {
|
||||
return new Config({});
|
||||
}
|
||||
static withValue<K extends string, B extends ValueSpec>(key: K, value: Value<B>) {
|
||||
static withValue<K extends string, B extends ValueSpec>(
|
||||
key: K,
|
||||
value: Value<B>
|
||||
) {
|
||||
return Config.empty().withValue(key, value);
|
||||
}
|
||||
static addValue<K extends string, B extends ValueSpec>(key: K, value: Value<B>) {
|
||||
static addValue<K extends string, B extends ValueSpec>(
|
||||
key: K,
|
||||
value: Value<B>
|
||||
) {
|
||||
return Config.empty().withValue(key, value);
|
||||
}
|
||||
|
||||
51
lib/config_builder/index.test.ts
Normal file
51
lib/config_builder/index.test.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
describe("test", () => {
|
||||
test("test", () => {
|
||||
expect(true).toEqual(true);
|
||||
});
|
||||
});
|
||||
// import { Config } from "./config";
|
||||
// import { Value } from "./value";
|
||||
// import { expect } from "https://deno.land/x/expect@v0.2.9/mod";
|
||||
// const { test } = Deno;
|
||||
|
||||
// test("String", () => {
|
||||
// const bitcoinPropertiesBuilt: {
|
||||
// "peer-tor-address": {
|
||||
// name: string;
|
||||
// description: string | null;
|
||||
// type: "string";
|
||||
// };
|
||||
// } = Config.of({
|
||||
// "peer-tor-address": Value.string({
|
||||
// name: "Peer tor address",
|
||||
// default: "",
|
||||
// description: "The Tor address of the peer interface",
|
||||
// warning: null,
|
||||
// nullable: false,
|
||||
// masked: true,
|
||||
// placeholder: null,
|
||||
// pattern: null,
|
||||
// "pattern-description": null,
|
||||
// textarea: null,
|
||||
// }),
|
||||
// }).build();
|
||||
// expect(JSON.stringify(bitcoinPropertiesBuilt)).toEqual(
|
||||
// /*json*/ `{
|
||||
// "peer-tor-address": {
|
||||
// "type": "string",
|
||||
// "name": "Peer tor address",
|
||||
// "default": "",
|
||||
// "description": "The Tor address of the peer interface",
|
||||
// "warning": null,
|
||||
// "nullable": false,
|
||||
// "masked": true,
|
||||
// "placeholder": null,
|
||||
// "pattern": null,
|
||||
// "pattern-description": null,
|
||||
// "textarea": null
|
||||
// }}`
|
||||
// .replaceAll("\n", " ")
|
||||
// .replaceAll(/\s{2,}/g, "")
|
||||
// .replaceAll(": ", ":")
|
||||
// );
|
||||
// });
|
||||
@@ -1,9 +1,14 @@
|
||||
import { BuilderExtract, IBuilder } from "./builder.ts";
|
||||
import { Config } from "./config.ts";
|
||||
import { Default, NumberSpec, StringSpec } from "./value.ts";
|
||||
import { Description } from "./value.ts";
|
||||
import { Variants } from "./variants.ts";
|
||||
import { ConfigSpec, UniqueBy, ValueSpecList, ValueSpecListOf } from "../types/config-types.ts";
|
||||
import { BuilderExtract, IBuilder } from "./builder";
|
||||
import { Config } from "./config";
|
||||
import { Default, NumberSpec, StringSpec } from "./value";
|
||||
import { Description } from "./value";
|
||||
import { Variants } from "./variants";
|
||||
import {
|
||||
ConfigSpec,
|
||||
UniqueBy,
|
||||
ValueSpecList,
|
||||
ValueSpecListOf,
|
||||
} from "../types/config-types";
|
||||
|
||||
/**
|
||||
* Used as a subtype of Value.list
|
||||
@@ -85,7 +90,9 @@ export class List<A extends ValueSpecList> extends IBuilder<A> {
|
||||
>(a: A) {
|
||||
const { spec: previousSpec, ...rest } = a;
|
||||
const { spec: previousSpecSpec, ...restSpec } = previousSpec;
|
||||
const specSpec = previousSpecSpec.build() as BuilderExtract<A["spec"]["spec"]>;
|
||||
const specSpec = previousSpecSpec.build() as BuilderExtract<
|
||||
A["spec"]["spec"]
|
||||
>;
|
||||
const spec = {
|
||||
...restSpec,
|
||||
spec: specSpec,
|
||||
@@ -124,7 +131,9 @@ export class List<A extends ValueSpecList> extends IBuilder<A> {
|
||||
>(a: A) {
|
||||
const { spec: previousSpec, ...rest } = a;
|
||||
const { variants: previousVariants, ...restSpec } = previousSpec;
|
||||
const variants = previousVariants.build() as BuilderExtract<A["spec"]["variants"]>;
|
||||
const variants = previousVariants.build() as BuilderExtract<
|
||||
A["spec"]["variants"]
|
||||
>;
|
||||
const spec = {
|
||||
...restSpec,
|
||||
variants,
|
||||
13
lib/config_builder/mod.ts
Normal file
13
lib/config_builder/mod.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Config } from "./config";
|
||||
import { List } from "./list";
|
||||
import { Value } from "./value";
|
||||
import { Variants } from "./variants";
|
||||
|
||||
export {
|
||||
/** @typedef { import("./config").Config } Pet
|
||||
*/
|
||||
Config,
|
||||
List,
|
||||
Value,
|
||||
Variants,
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
import { BuilderExtract, IBuilder } from "./builder.ts";
|
||||
import { Config } from "./config.ts";
|
||||
import { List } from "./list.ts";
|
||||
import { Variants } from "./variants.ts";
|
||||
import { BuilderExtract, IBuilder } from "./builder";
|
||||
import { Config } from "./config";
|
||||
import { List } from "./list";
|
||||
import { Variants } from "./variants";
|
||||
import {
|
||||
ConfigSpec,
|
||||
UniqueBy,
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
ValueSpecList,
|
||||
ValueSpecNumber,
|
||||
ValueSpecString,
|
||||
} from "../types/config-types.ts";
|
||||
} from "../types/config-types";
|
||||
|
||||
export type DefaultString =
|
||||
| string
|
||||
@@ -76,13 +76,20 @@ export class Value<A extends ValueSpec> extends IBuilder<A> {
|
||||
...a,
|
||||
});
|
||||
}
|
||||
static string<A extends Description & NullableDefault<DefaultString> & Nullable & StringSpec>(a: A) {
|
||||
static string<
|
||||
A extends Description &
|
||||
NullableDefault<DefaultString> &
|
||||
Nullable &
|
||||
StringSpec
|
||||
>(a: A) {
|
||||
return new Value({
|
||||
type: "string" as const,
|
||||
...a,
|
||||
} as ValueSpecString);
|
||||
}
|
||||
static number<A extends Description & NullableDefault<number> & Nullable & NumberSpec>(a: A) {
|
||||
static number<
|
||||
A extends Description & NullableDefault<number> & Nullable & NumberSpec
|
||||
>(a: A) {
|
||||
return new Value({
|
||||
type: "number" as const,
|
||||
...a,
|
||||
@@ -1,6 +1,6 @@
|
||||
import { ConfigSpec } from "../types/config-types.ts";
|
||||
import { BuilderExtract, IBuilder } from "./builder.ts";
|
||||
import { Config } from "./mod.ts";
|
||||
import { ConfigSpec } from "../types/config-types";
|
||||
import { BuilderExtract, IBuilder } from "./builder";
|
||||
import { Config } from "./mod";
|
||||
|
||||
/**
|
||||
* Used in the the Value.enum { @link './value.ts' }
|
||||
@@ -38,7 +38,9 @@ import { Config } from "./mod.ts";
|
||||
});
|
||||
```
|
||||
*/
|
||||
export class Variants<A extends { [key: string]: ConfigSpec }> extends IBuilder<A> {
|
||||
export class Variants<
|
||||
A extends { [key: string]: ConfigSpec }
|
||||
> extends IBuilder<A> {
|
||||
static of<
|
||||
A extends {
|
||||
[key: string]: Config<ConfigSpec>;
|
||||
@@ -56,11 +58,17 @@ export class Variants<A extends { [key: string]: ConfigSpec }> extends IBuilder<
|
||||
static empty() {
|
||||
return Variants.of({});
|
||||
}
|
||||
static withVariant<K extends string, B extends ConfigSpec>(key: K, value: Config<B>) {
|
||||
static withVariant<K extends string, B extends ConfigSpec>(
|
||||
key: K,
|
||||
value: Config<B>
|
||||
) {
|
||||
return Variants.empty().withVariant(key, value);
|
||||
}
|
||||
|
||||
withVariant<K extends string, B extends ConfigSpec>(key: K, value: Config<B>) {
|
||||
withVariant<K extends string, B extends ConfigSpec>(
|
||||
key: K,
|
||||
value: Config<B>
|
||||
) {
|
||||
return new Variants({
|
||||
...this.a,
|
||||
[key]: value.build(),
|
||||
@@ -1,6 +1,8 @@
|
||||
import { matches, TOML, YAML } from "../dependencies.ts";
|
||||
import * as T from "../types.ts";
|
||||
import { exists } from "../util.ts";
|
||||
import * as matches from "ts-matches";
|
||||
import * as YAML from "yaml";
|
||||
import * as TOML from "@iarna/toml";
|
||||
import * as T from "../types";
|
||||
import { exists } from "../util";
|
||||
|
||||
const previousPath = /(.+?)\/([^/]*)$/;
|
||||
|
||||
@@ -56,7 +58,7 @@ export class ConfigFile<A> {
|
||||
volume: string;
|
||||
writeData(dataIn: A): string;
|
||||
readData(stringValue: string): A;
|
||||
},
|
||||
}
|
||||
) {}
|
||||
async write(data: A, effects: T.Effects) {
|
||||
let matched;
|
||||
@@ -79,22 +81,22 @@ export class ConfigFile<A> {
|
||||
path: this.options.path,
|
||||
volumeId: this.options.volume,
|
||||
}))
|
||||
) return null;
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
return this.options.readData(
|
||||
await effects.readFile({
|
||||
path: this.options.path,
|
||||
volumeId: this.options.volume,
|
||||
}),
|
||||
})
|
||||
);
|
||||
}
|
||||
static raw<A>(
|
||||
options: {
|
||||
path: string;
|
||||
volume: string;
|
||||
fromData(dataIn: A): string;
|
||||
toData(rawData: string): A;
|
||||
},
|
||||
) {
|
||||
static raw<A>(options: {
|
||||
path: string;
|
||||
volume: string;
|
||||
fromData(dataIn: A): string;
|
||||
toData(rawData: string): A;
|
||||
}) {
|
||||
return new ConfigFile<A>({
|
||||
path: options.path,
|
||||
volume: options.volume,
|
||||
@@ -102,13 +104,11 @@ export class ConfigFile<A> {
|
||||
readData: options.toData,
|
||||
});
|
||||
}
|
||||
static json<A>(
|
||||
options: {
|
||||
path: string;
|
||||
volume: string;
|
||||
validator: matches.Validator<unknown, A>;
|
||||
},
|
||||
) {
|
||||
static json<A>(options: {
|
||||
path: string;
|
||||
volume: string;
|
||||
validator: matches.Validator<unknown, A>;
|
||||
}) {
|
||||
return new ConfigFile<A>({
|
||||
path: options.path,
|
||||
volume: options.volume,
|
||||
@@ -129,7 +129,7 @@ export class ConfigFile<A> {
|
||||
path: options.path,
|
||||
volume: options.volume,
|
||||
writeData(inData) {
|
||||
return TOML.stringify(inData);
|
||||
return TOML.stringify(inData as TOML.JsonMap);
|
||||
},
|
||||
readData(inString) {
|
||||
return options.validator.unsafeCast(TOML.parse(inString));
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ConfigFile } from "./config_file.ts";
|
||||
import { ConfigFile } from "./config_file";
|
||||
|
||||
/**
|
||||
* A useful tool when doing a getConfig.
|
||||
@@ -10,5 +10,5 @@ export function nullIfEmpty(s: Record<string, unknown>) {
|
||||
return Object.keys(s).length === 0 ? null : s;
|
||||
}
|
||||
|
||||
export { setupConfigExports } from "./setup_config_export.ts";
|
||||
export { setupConfigExports } from "./setup_config_export";
|
||||
export { ConfigFile };
|
||||
@@ -1,16 +1,16 @@
|
||||
import { Config } from "../config_builder/mod.ts";
|
||||
import { DependsOn, Effects, ExpectedExports } from "../types.ts";
|
||||
import { ConfigSpec } from "../types/config-types.ts";
|
||||
import { okOf } from "../util.ts";
|
||||
import { TypeFromProps } from "../utils/propertiesMatcher.ts";
|
||||
import { nullIfEmpty } from "./mod.ts";
|
||||
import { Config } from "../config_builder/mod";
|
||||
import { DependsOn, Effects, ExpectedExports } from "../types";
|
||||
import { ConfigSpec } from "../types/config-types";
|
||||
import { okOf } from "../util";
|
||||
import { TypeFromProps } from "../utils/propertiesMatcher";
|
||||
import { nullIfEmpty } from "./mod";
|
||||
|
||||
export function setupConfigExports<A extends ConfigSpec>(options: {
|
||||
spec: Config<A>;
|
||||
dependsOn: DependsOn;
|
||||
write(effects: Effects, config: TypeFromProps<A>): Promise<null>;
|
||||
read(
|
||||
effects: Effects,
|
||||
effects: Effects
|
||||
): Promise<Record<string | number, never> | TypeFromProps<A>>;
|
||||
}) {
|
||||
const validator = options.spec.validator();
|
||||
246
lib/emver-lite/emverList.test.ts
Normal file
246
lib/emver-lite/emverList.test.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
import { EmVer, notRange, rangeAnd, rangeOf, rangeOr } from "./mod";
|
||||
describe("EmVer", () => {
|
||||
{
|
||||
{
|
||||
const checker = rangeOf("*");
|
||||
test("rangeOf('*')", () => {
|
||||
expect(checker.check("1")).toEqual(true);
|
||||
expect(checker.check("1.2")).toEqual(true);
|
||||
expect(checker.check("1.2.3.4")).toEqual(true);
|
||||
});
|
||||
test("rangeOf('*') invalid", () => {
|
||||
expect(() => checker.check("a")).toThrow();
|
||||
expect(() => checker.check("")).toThrow();
|
||||
expect(() => checker.check("1..3")).toThrow();
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const checker = rangeOf(">1.2.3.4");
|
||||
test(`rangeOf(">1.2.3.4") valid`, () => {
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
expect(checker.check("1.2.3.5")).toEqual(true);
|
||||
expect(checker.check("1.2.3.4.1")).toEqual(true);
|
||||
});
|
||||
|
||||
test(`rangeOf(">1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("1.2.3.4")).toEqual(false);
|
||||
expect(checker.check("1.2.3")).toEqual(false);
|
||||
expect(checker.check("1")).toEqual(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("=1.2.3");
|
||||
test(`rangeOf("=1.2.3") valid`, () => {
|
||||
expect(checker.check("1.2.3")).toEqual(true);
|
||||
});
|
||||
|
||||
test(`rangeOf("=1.2.3") invalid`, () => {
|
||||
expect(checker.check("2")).toEqual(false);
|
||||
expect(checker.check("1.2.3.1")).toEqual(false);
|
||||
expect(checker.check("1.2")).toEqual(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf(">=1.2.3.4");
|
||||
test(`rangeOf(">=1.2.3.4") valid`, () => {
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
expect(checker.check("1.2.3.5")).toEqual(true);
|
||||
expect(checker.check("1.2.3.4.1")).toEqual(true);
|
||||
expect(checker.check("1.2.3.4")).toEqual(true);
|
||||
});
|
||||
|
||||
test(`rangeOf(">=1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("1.2.3")).toEqual(false);
|
||||
expect(checker.check("1")).toEqual(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("<1.2.3.4");
|
||||
test(`rangeOf("<1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("2")).toEqual(false);
|
||||
expect(checker.check("1.2.3.5")).toEqual(false);
|
||||
expect(checker.check("1.2.3.4.1")).toEqual(false);
|
||||
expect(checker.check("1.2.3.4")).toEqual(false);
|
||||
});
|
||||
|
||||
test(`rangeOf("<1.2.3.4") valid`, () => {
|
||||
expect(checker.check("1.2.3")).toEqual(true);
|
||||
expect(checker.check("1")).toEqual(true);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("<=1.2.3.4");
|
||||
test(`rangeOf("<=1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("2")).toEqual(false);
|
||||
expect(checker.check("1.2.3.5")).toEqual(false);
|
||||
expect(checker.check("1.2.3.4.1")).toEqual(false);
|
||||
});
|
||||
|
||||
test(`rangeOf("<=1.2.3.4") valid`, () => {
|
||||
expect(checker.check("1.2.3")).toEqual(true);
|
||||
expect(checker.check("1")).toEqual(true);
|
||||
expect(checker.check("1.2.3.4")).toEqual(true);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const checkA = rangeOf(">1");
|
||||
const checkB = rangeOf("<=2");
|
||||
|
||||
const checker = rangeAnd(checkA, checkB);
|
||||
test(`simple and(checkers) valid`, () => {
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
|
||||
expect(checker.check("1.1")).toEqual(true);
|
||||
});
|
||||
test(`simple and(checkers) invalid`, () => {
|
||||
expect(checker.check("2.1")).toEqual(false);
|
||||
expect(checker.check("1")).toEqual(false);
|
||||
expect(checker.check("0")).toEqual(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checkA = rangeOf("<1");
|
||||
const checkB = rangeOf("=2");
|
||||
|
||||
const checker = rangeOr(checkA, checkB);
|
||||
test(`simple or(checkers) valid`, () => {
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
expect(checker.check("0.1")).toEqual(true);
|
||||
});
|
||||
test(`simple or(checkers) invalid`, () => {
|
||||
expect(checker.check("2.1")).toEqual(false);
|
||||
expect(checker.check("1")).toEqual(false);
|
||||
expect(checker.check("1.1")).toEqual(false);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const checker = rangeOf("1.2.*");
|
||||
test(`rangeOf(1.2.*) valid`, () => {
|
||||
expect(checker.check("1.2")).toEqual(true);
|
||||
expect(checker.check("1.2.1")).toEqual(true);
|
||||
});
|
||||
test(`rangeOf(1.2.*) invalid`, () => {
|
||||
expect(checker.check("1.3")).toEqual(false);
|
||||
expect(checker.check("1.3.1")).toEqual(false);
|
||||
|
||||
expect(checker.check("1.1.1")).toEqual(false);
|
||||
expect(checker.check("1.1")).toEqual(false);
|
||||
expect(checker.check("1")).toEqual(false);
|
||||
|
||||
expect(checker.check("2")).toEqual(false);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const checker = notRange(rangeOf("1.2.*"));
|
||||
test(`notRange(rangeOf(1.2.*)) valid`, () => {
|
||||
expect(checker.check("1.3")).toEqual(true);
|
||||
expect(checker.check("1.3.1")).toEqual(true);
|
||||
|
||||
expect(checker.check("1.1.1")).toEqual(true);
|
||||
expect(checker.check("1.1")).toEqual(true);
|
||||
expect(checker.check("1")).toEqual(true);
|
||||
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
});
|
||||
test(`notRange(rangeOf(1.2.*)) invalid `, () => {
|
||||
expect(checker.check("1.2")).toEqual(false);
|
||||
expect(checker.check("1.2.1")).toEqual(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("!1.2.*");
|
||||
test(`!(rangeOf(1.2.*)) valid`, () => {
|
||||
expect(checker.check("1.3")).toEqual(true);
|
||||
expect(checker.check("1.3.1")).toEqual(true);
|
||||
|
||||
expect(checker.check("1.1.1")).toEqual(true);
|
||||
expect(checker.check("1.1")).toEqual(true);
|
||||
expect(checker.check("1")).toEqual(true);
|
||||
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
});
|
||||
test(`!(rangeOf(1.2.*)) invalid `, () => {
|
||||
expect(checker.check("1.2")).toEqual(false);
|
||||
expect(checker.check("1.2.1")).toEqual(false);
|
||||
});
|
||||
}
|
||||
{
|
||||
test(`no and ranges`, () => {
|
||||
expect(() => rangeAnd()).toThrow();
|
||||
});
|
||||
test(`no or ranges`, () => {
|
||||
expect(() => rangeOr()).toThrow();
|
||||
});
|
||||
}
|
||||
{
|
||||
const checker = rangeOf("!>1.2.3.4");
|
||||
test(`rangeOf("!>1.2.3.4") invalid`, () => {
|
||||
expect(checker.check("2")).toEqual(false);
|
||||
expect(checker.check("1.2.3.5")).toEqual(false);
|
||||
expect(checker.check("1.2.3.4.1")).toEqual(false);
|
||||
});
|
||||
|
||||
test(`rangeOf("!>1.2.3.4") valid`, () => {
|
||||
expect(checker.check("1.2.3.4")).toEqual(true);
|
||||
expect(checker.check("1.2.3")).toEqual(true);
|
||||
expect(checker.check("1")).toEqual(true);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
test(">1 && =1.2", () => {
|
||||
const checker = rangeOf(">1 && =1.2");
|
||||
|
||||
expect(checker.check("1.2")).toEqual(true);
|
||||
expect(checker.check("1.2.1")).toEqual(false);
|
||||
});
|
||||
test("=1 || =2", () => {
|
||||
const checker = rangeOf("=1 || =2");
|
||||
|
||||
expect(checker.check("1")).toEqual(true);
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
expect(checker.check("3")).toEqual(false);
|
||||
});
|
||||
|
||||
test(">1 && =1.2 || =2", () => {
|
||||
const checker = rangeOf(">1 && =1.2 || =2");
|
||||
|
||||
expect(checker.check("1.2")).toEqual(true);
|
||||
expect(checker.check("1")).toEqual(false);
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
expect(checker.check("3")).toEqual(false);
|
||||
});
|
||||
|
||||
test("&& before || order of operationns: <1.5 && >1 || >1.5 && <3", () => {
|
||||
const checker = rangeOf("<1.5 && >1 || >1.5 && <3");
|
||||
expect(checker.check("1.1")).toEqual(true);
|
||||
expect(checker.check("2")).toEqual(true);
|
||||
|
||||
expect(checker.check("1.5")).toEqual(false);
|
||||
expect(checker.check("1")).toEqual(false);
|
||||
expect(checker.check("3")).toEqual(false);
|
||||
});
|
||||
|
||||
test("Compare function on the emver", () => {
|
||||
const a = EmVer.from("1.2.3");
|
||||
const b = EmVer.from("1.2.4");
|
||||
|
||||
expect(a.compare(b)).toEqual("less");
|
||||
expect(b.compare(a)).toEqual("greater");
|
||||
expect(a.compare(a)).toEqual("equal");
|
||||
});
|
||||
test("Compare for sort function on the emver", () => {
|
||||
const a = EmVer.from("1.2.3");
|
||||
const b = EmVer.from("1.2.4");
|
||||
|
||||
expect(a.compareForSort(b)).toEqual(-1);
|
||||
expect(b.compareForSort(a)).toEqual(1);
|
||||
expect(a.compareForSort(a)).toEqual(0);
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -1,4 +1,4 @@
|
||||
import { matches } from "../dependencies.ts";
|
||||
import * as matches from "ts-matches";
|
||||
|
||||
const starSub = /((\d+\.)*\d+)\.\*/;
|
||||
|
||||
@@ -146,7 +146,8 @@ export class EmVer {
|
||||
* @returns
|
||||
*/
|
||||
public compareForSort(other: EmVer) {
|
||||
return matches.matches(this.compare(other))
|
||||
return matches
|
||||
.matches(this.compare(other))
|
||||
.when("equal", () => 0 as const)
|
||||
.when("greater", () => 1 as const)
|
||||
.when("less", () => -1 as const)
|
||||
@@ -192,8 +193,11 @@ export class Checker {
|
||||
|
||||
return new Checker((version) => {
|
||||
const v = EmVer.from(version);
|
||||
return (v.greaterThan(emVarLower) || v.equals(emVarLower)) &&
|
||||
!v.greaterThan(emVarUpper) && !v.equals(emVarUpper);
|
||||
return (
|
||||
(v.greaterThan(emVarLower) || v.equals(emVarLower)) &&
|
||||
!v.greaterThan(emVarUpper) &&
|
||||
!v.equals(emVarUpper)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -245,7 +249,7 @@ export class Checker {
|
||||
* Check is the function that will be given a emver or unparsed emver and should give if it follows
|
||||
* a pattern
|
||||
*/
|
||||
public readonly check: (value: string | EmVer) => boolean,
|
||||
public readonly check: (value: string | EmVer) => boolean
|
||||
) {}
|
||||
|
||||
/**
|
||||
@@ -1,15 +0,0 @@
|
||||
// https://github.com/tc39/proposal-accessible-object-hasownproperty/blob/main/polyfill.js
|
||||
if (!Object.hasOwn) {
|
||||
Object.defineProperty(Object, "hasOwn", {
|
||||
value: function (object, property) {
|
||||
if (object == null) {
|
||||
throw new TypeError("Cannot convert undefined or null to object");
|
||||
}
|
||||
return Object.prototype.hasOwnProperty.call(Object(object), property);
|
||||
},
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
});
|
||||
}
|
||||
export {};
|
||||
@@ -1,62 +0,0 @@
|
||||
import { Deno } from "@deno/shim-deno";
|
||||
export { Deno } from "@deno/shim-deno";
|
||||
const dntGlobals = {
|
||||
Deno,
|
||||
};
|
||||
export const dntGlobalThis = createMergeProxy(globalThis, dntGlobals);
|
||||
// deno-lint-ignore ban-types
|
||||
function createMergeProxy(baseObj, extObj) {
|
||||
return new Proxy(baseObj, {
|
||||
get(_target, prop, _receiver) {
|
||||
if (prop in extObj) {
|
||||
return extObj[prop];
|
||||
}
|
||||
else {
|
||||
return baseObj[prop];
|
||||
}
|
||||
},
|
||||
set(_target, prop, value) {
|
||||
if (prop in extObj) {
|
||||
delete extObj[prop];
|
||||
}
|
||||
baseObj[prop] = value;
|
||||
return true;
|
||||
},
|
||||
deleteProperty(_target, prop) {
|
||||
let success = false;
|
||||
if (prop in extObj) {
|
||||
delete extObj[prop];
|
||||
success = true;
|
||||
}
|
||||
if (prop in baseObj) {
|
||||
delete baseObj[prop];
|
||||
success = true;
|
||||
}
|
||||
return success;
|
||||
},
|
||||
ownKeys(_target) {
|
||||
const baseKeys = Reflect.ownKeys(baseObj);
|
||||
const extKeys = Reflect.ownKeys(extObj);
|
||||
const extKeysSet = new Set(extKeys);
|
||||
return [...baseKeys.filter((k) => !extKeysSet.has(k)), ...extKeys];
|
||||
},
|
||||
defineProperty(_target, prop, desc) {
|
||||
if (prop in extObj) {
|
||||
delete extObj[prop];
|
||||
}
|
||||
Reflect.defineProperty(baseObj, prop, desc);
|
||||
return true;
|
||||
},
|
||||
getOwnPropertyDescriptor(_target, prop) {
|
||||
if (prop in extObj) {
|
||||
return Reflect.getOwnPropertyDescriptor(extObj, prop);
|
||||
}
|
||||
else {
|
||||
return Reflect.getOwnPropertyDescriptor(baseObj, prop);
|
||||
}
|
||||
},
|
||||
has(_target, prop) {
|
||||
return prop in extObj || prop in baseObj;
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -1,62 +0,0 @@
|
||||
import { Deno } from "@deno/shim-deno";
|
||||
export { Deno } from "@deno/shim-deno";
|
||||
const dntGlobals = {
|
||||
Deno,
|
||||
};
|
||||
export const dntGlobalThis = createMergeProxy(globalThis, dntGlobals);
|
||||
// deno-lint-ignore ban-types
|
||||
function createMergeProxy(baseObj, extObj) {
|
||||
return new Proxy(baseObj, {
|
||||
get(_target, prop, _receiver) {
|
||||
if (prop in extObj) {
|
||||
return extObj[prop];
|
||||
}
|
||||
else {
|
||||
return baseObj[prop];
|
||||
}
|
||||
},
|
||||
set(_target, prop, value) {
|
||||
if (prop in extObj) {
|
||||
delete extObj[prop];
|
||||
}
|
||||
baseObj[prop] = value;
|
||||
return true;
|
||||
},
|
||||
deleteProperty(_target, prop) {
|
||||
let success = false;
|
||||
if (prop in extObj) {
|
||||
delete extObj[prop];
|
||||
success = true;
|
||||
}
|
||||
if (prop in baseObj) {
|
||||
delete baseObj[prop];
|
||||
success = true;
|
||||
}
|
||||
return success;
|
||||
},
|
||||
ownKeys(_target) {
|
||||
const baseKeys = Reflect.ownKeys(baseObj);
|
||||
const extKeys = Reflect.ownKeys(extObj);
|
||||
const extKeysSet = new Set(extKeys);
|
||||
return [...baseKeys.filter((k) => !extKeysSet.has(k)), ...extKeys];
|
||||
},
|
||||
defineProperty(_target, prop, desc) {
|
||||
if (prop in extObj) {
|
||||
delete extObj[prop];
|
||||
}
|
||||
Reflect.defineProperty(baseObj, prop, desc);
|
||||
return true;
|
||||
},
|
||||
getOwnPropertyDescriptor(_target, prop) {
|
||||
if (prop in extObj) {
|
||||
return Reflect.getOwnPropertyDescriptor(extObj, prop);
|
||||
}
|
||||
else {
|
||||
return Reflect.getOwnPropertyDescriptor(baseObj, prop);
|
||||
}
|
||||
},
|
||||
has(_target, prop) {
|
||||
return prop in extObj || prop in baseObj;
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -1,129 +0,0 @@
|
||||
import { ok } from "./util.js";
|
||||
export const DEFAULT_OPTIONS = {
|
||||
delete: true,
|
||||
force: true,
|
||||
ignoreExisting: false,
|
||||
exclude: [],
|
||||
};
|
||||
/**
|
||||
* This utility simplifies the volume backup process.
|
||||
* ```ts
|
||||
* export const { createBackup, restoreBackup } = Backups.volumes("main").build();
|
||||
* ```
|
||||
*
|
||||
* Changing the options of the rsync, (ie exludes) use either
|
||||
* ```ts
|
||||
* Backups.volumes("main").set_options({exclude: ['bigdata/']}).volumes('excludedVolume').build()
|
||||
* // or
|
||||
* Backups.with_options({exclude: ['bigdata/']}).volumes('excludedVolume').build()
|
||||
* ```
|
||||
*
|
||||
* Using the more fine control, using the addSets for more control
|
||||
* ```ts
|
||||
* Backups.addSets({
|
||||
* srcVolume: 'main', srcPath:'smallData/', dstPath: 'main/smallData/', dstVolume: : Backups.BACKUP
|
||||
* }, {
|
||||
* srcVolume: 'main', srcPath:'bigData/', dstPath: 'main/bigData/', dstVolume: : Backups.BACKUP, options: {exclude:['bigData/excludeThis']}}
|
||||
* ).build()
|
||||
* ```
|
||||
*/
|
||||
export class Backups {
|
||||
constructor(options = DEFAULT_OPTIONS, backupSet = []) {
|
||||
Object.defineProperty(this, "options", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: options
|
||||
});
|
||||
Object.defineProperty(this, "backupSet", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: backupSet
|
||||
});
|
||||
}
|
||||
static volumes(...volumeNames) {
|
||||
return new Backups().addSets(...volumeNames.map((srcVolume) => ({
|
||||
srcVolume,
|
||||
srcPath: "./",
|
||||
dstPath: `./${srcVolume}/`,
|
||||
dstVolume: Backups.BACKUP,
|
||||
})));
|
||||
}
|
||||
static addSets(...options) {
|
||||
return new Backups().addSets(...options);
|
||||
}
|
||||
static with_options(options) {
|
||||
return new Backups({ ...DEFAULT_OPTIONS, ...options });
|
||||
}
|
||||
set_options(options) {
|
||||
this.options = {
|
||||
...this.options,
|
||||
...options,
|
||||
};
|
||||
return this;
|
||||
}
|
||||
volumes(...volumeNames) {
|
||||
return this.addSets(...volumeNames.map((srcVolume) => ({
|
||||
srcVolume,
|
||||
srcPath: "./",
|
||||
dstPath: `./${srcVolume}/`,
|
||||
dstVolume: Backups.BACKUP,
|
||||
})));
|
||||
}
|
||||
addSets(...options) {
|
||||
options.forEach((x) => this.backupSet.push({ ...x, options: { ...this.options, ...x.options } }));
|
||||
return this;
|
||||
}
|
||||
build() {
|
||||
const createBackup = async (effects) => {
|
||||
for (const item of this.backupSet) {
|
||||
if (notEmptyPath(item.dstPath)) {
|
||||
await effects.createDir({
|
||||
volumeId: item.dstVolume,
|
||||
path: item.dstPath,
|
||||
});
|
||||
}
|
||||
await effects.runRsync({
|
||||
...item,
|
||||
options: {
|
||||
...this.options,
|
||||
...item.options,
|
||||
},
|
||||
}).wait();
|
||||
}
|
||||
return ok;
|
||||
};
|
||||
const restoreBackup = async (effects) => {
|
||||
for (const item of this.backupSet) {
|
||||
if (notEmptyPath(item.srcPath)) {
|
||||
await effects.createDir({
|
||||
volumeId: item.srcVolume,
|
||||
path: item.srcPath,
|
||||
});
|
||||
}
|
||||
await effects.runRsync({
|
||||
options: {
|
||||
...this.options,
|
||||
...item.options,
|
||||
},
|
||||
srcVolume: item.dstVolume,
|
||||
dstVolume: item.srcVolume,
|
||||
srcPath: item.dstPath,
|
||||
dstPath: item.srcPath,
|
||||
}).wait();
|
||||
}
|
||||
return ok;
|
||||
};
|
||||
return { createBackup, restoreBackup };
|
||||
}
|
||||
}
|
||||
Object.defineProperty(Backups, "BACKUP", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: "BACKUP"
|
||||
});
|
||||
function notEmptyPath(file) {
|
||||
return ["", ".", "./"].indexOf(file) === -1;
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
import { YAML } from "../dependencies.js";
|
||||
import { matches } from "../dependencies.js";
|
||||
import { typeFromProps } from "../utils/propertiesMatcher.js";
|
||||
const { any, string, dictionary } = matches;
|
||||
const matchConfig = dictionary([string, any]);
|
||||
/**
|
||||
* Call with the configuration to get a standard getConfig for the expected exports
|
||||
* Assumption: start9/config.yaml is where the config will be stored
|
||||
* Throws: Error if there is no file
|
||||
* Throws: Error if the config.yaml isn't yaml nor config shape
|
||||
* @param spec
|
||||
* @returns
|
||||
*/
|
||||
export const getConfig = (spec) => async (effects) => {
|
||||
const config = await effects
|
||||
.readFile({
|
||||
path: "start9/config.yaml",
|
||||
volumeId: "main",
|
||||
})
|
||||
.then((x) => YAML.parse(x))
|
||||
.then((x) => matchConfig.unsafeCast(x))
|
||||
.catch((e) => {
|
||||
effects.info(`Got error ${e} while trying to read the config`);
|
||||
return undefined;
|
||||
});
|
||||
return {
|
||||
result: {
|
||||
config,
|
||||
spec: spec.build(),
|
||||
},
|
||||
};
|
||||
};
|
||||
/**
|
||||
* Call with the configuration to get a standard getConfig for the expected exports
|
||||
* Assumption: start9/config.yaml is where the config will be stored
|
||||
* Throws: Error if there is no file
|
||||
* Throws: Error if the config.yaml isn't yaml nor config shape
|
||||
* @param spec
|
||||
* @returns A funnction for getConfig and the matcher for the spec sent in
|
||||
*/
|
||||
export const getConfigAndMatcher = (spec) => {
|
||||
const specBuilt = spec.build();
|
||||
return [
|
||||
async (effects) => {
|
||||
const config = await effects
|
||||
.readFile({
|
||||
path: "start9/config.yaml",
|
||||
volumeId: "main",
|
||||
})
|
||||
.then((x) => YAML.parse(x))
|
||||
.then((x) => matchConfig.unsafeCast(x))
|
||||
.catch((e) => {
|
||||
effects.info(`Got error ${e} while trying to read the config`);
|
||||
return undefined;
|
||||
});
|
||||
return {
|
||||
result: {
|
||||
config,
|
||||
spec: specBuilt,
|
||||
},
|
||||
};
|
||||
},
|
||||
typeFromProps(specBuilt),
|
||||
];
|
||||
};
|
||||
@@ -1,87 +0,0 @@
|
||||
import { getConfig, setConfig } from "./mod.js";
|
||||
import * as M from "../migrations.js";
|
||||
import * as util from "../util.js";
|
||||
import { EmVer } from "../emver-lite/mod.js";
|
||||
import { Config } from "../config/mod.js";
|
||||
/**
|
||||
* @param fn function making desired modifications to the config
|
||||
* @param configured whether or not the service should be considered "configured"
|
||||
* @param noRepeat (optional) supply the version and type of the migration
|
||||
* @param noFail (optional, default:false) whether or not to fail the migration if fn throws an error
|
||||
* @returns a migraion function
|
||||
*/
|
||||
export function updateConfig(fn, configured, noRepeat, noFail = false) {
|
||||
return M.migrationFn(async (effects) => {
|
||||
await noRepeatGuard(effects, noRepeat, async () => {
|
||||
let config = util.unwrapResultType(await getConfig(Config.of({}))(effects)).config;
|
||||
if (config) {
|
||||
try {
|
||||
config = await fn(config, effects);
|
||||
}
|
||||
catch (e) {
|
||||
if (!noFail) {
|
||||
throw e;
|
||||
}
|
||||
else {
|
||||
configured = false;
|
||||
}
|
||||
}
|
||||
util.unwrapResultType(await setConfig(effects, config));
|
||||
}
|
||||
});
|
||||
return { configured };
|
||||
});
|
||||
}
|
||||
export async function noRepeatGuard(effects, noRepeat, fn) {
|
||||
if (!noRepeat) {
|
||||
return fn();
|
||||
}
|
||||
if (!(await util.exists(effects, {
|
||||
path: "start9/migrations",
|
||||
volumeId: "main",
|
||||
}))) {
|
||||
await effects.createDir({ path: "start9/migrations", volumeId: "main" });
|
||||
}
|
||||
const migrationPath = {
|
||||
path: `start9/migrations/${noRepeat.version}.complete`,
|
||||
volumeId: "main",
|
||||
};
|
||||
if (noRepeat.type === "up") {
|
||||
if (!(await util.exists(effects, migrationPath))) {
|
||||
await fn();
|
||||
await effects.writeFile({ ...migrationPath, toWrite: "" });
|
||||
}
|
||||
}
|
||||
else if (noRepeat.type === "down") {
|
||||
if (await util.exists(effects, migrationPath)) {
|
||||
await fn();
|
||||
await effects.removeFile(migrationPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
export async function initNoRepeat(effects, migrations, startingVersion) {
|
||||
if (!(await util.exists(effects, {
|
||||
path: "start9/migrations",
|
||||
volumeId: "main",
|
||||
}))) {
|
||||
const starting = EmVer.parse(startingVersion);
|
||||
await effects.createDir({ path: "start9/migrations", volumeId: "main" });
|
||||
for (const version in migrations) {
|
||||
const migrationVersion = EmVer.parse(version);
|
||||
if (migrationVersion.lessThanOrEqual(starting)) {
|
||||
await effects.writeFile({
|
||||
path: `start9/migrations/${version}.complete`,
|
||||
volumeId: "main",
|
||||
toWrite: "",
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
export function fromMapping(migrations, currentVersion) {
|
||||
const inner = M.fromMapping(migrations, currentVersion);
|
||||
return async (effects, version, direction) => {
|
||||
await initNoRepeat(effects, migrations, direction === "from" ? version : currentVersion);
|
||||
return inner(effects, version, direction);
|
||||
};
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
export { properties } from "./properties.js";
|
||||
export { setConfig } from "./setConfig.js";
|
||||
export { getConfig, getConfigAndMatcher } from "./getConfig.js";
|
||||
export * as migrations from "./migrations.js";
|
||||
@@ -1,36 +0,0 @@
|
||||
import { YAML } from "../dependencies.js";
|
||||
import { exists } from "../util.js";
|
||||
// deno-lint-ignore no-explicit-any
|
||||
const asResult = (result) => ({ result: result });
|
||||
const noPropertiesFound = {
|
||||
result: {
|
||||
version: 2,
|
||||
data: {
|
||||
"Not Ready": {
|
||||
type: "string",
|
||||
value: "Could not find properties. The service might still be starting",
|
||||
qr: false,
|
||||
copyable: false,
|
||||
masked: false,
|
||||
description: "Fallback Message When Properties could not be found",
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
/**
|
||||
* Default will pull from a file (start9/stats.yaml) expected to be made on the main volume
|
||||
* Assumption: start9/stats.yaml is created by some process
|
||||
* Throws: stats.yaml isn't yaml
|
||||
* @param effects
|
||||
* @returns
|
||||
*/
|
||||
export const properties = async (effects) => {
|
||||
if (await exists(effects, { path: "start9/stats.yaml", volumeId: "main" }) ===
|
||||
false) {
|
||||
return noPropertiesFound;
|
||||
}
|
||||
return await effects.readFile({
|
||||
path: "start9/stats.yaml",
|
||||
volumeId: "main",
|
||||
}).then(YAML.parse).then(asResult);
|
||||
};
|
||||
@@ -1,26 +0,0 @@
|
||||
import { YAML } from "../dependencies.js";
|
||||
/**
|
||||
* Will set the config to the default start9/config.yaml
|
||||
* Assumption: start9/config.yaml is the location of the configuration
|
||||
* @param effects
|
||||
* @param newConfig Config to be written to start9/config.yaml
|
||||
* @param depends_on This would be the depends on for condition depends_on
|
||||
* @returns
|
||||
*/
|
||||
export const setConfig = async (effects, newConfig, dependsOn = {}) => {
|
||||
await effects.createDir({
|
||||
path: "start9",
|
||||
volumeId: "main",
|
||||
});
|
||||
await effects.writeFile({
|
||||
path: "start9/config.yaml",
|
||||
toWrite: YAML.stringify(newConfig),
|
||||
volumeId: "main",
|
||||
});
|
||||
const result = {
|
||||
signal: "SIGTERM",
|
||||
"depends-on": dependsOn,
|
||||
};
|
||||
return { result };
|
||||
};
|
||||
const _typeConversionCheck = setConfig;
|
||||
@@ -1,13 +0,0 @@
|
||||
export class IBuilder {
|
||||
constructor(a) {
|
||||
Object.defineProperty(this, "a", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: a
|
||||
});
|
||||
}
|
||||
build() {
|
||||
return this.a;
|
||||
}
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
import { IBuilder } from "./builder.js";
|
||||
export class Config extends IBuilder {
|
||||
static empty() {
|
||||
return new Config({});
|
||||
}
|
||||
static withValue(key, value) {
|
||||
return Config.empty().withValue(key, value);
|
||||
}
|
||||
static addValue(key, value) {
|
||||
return Config.empty().withValue(key, value);
|
||||
}
|
||||
static of(spec) {
|
||||
// deno-lint-ignore no-explicit-any
|
||||
const answer = {};
|
||||
for (const key in spec) {
|
||||
// deno-lint-ignore no-explicit-any
|
||||
answer[key] = spec[key].build();
|
||||
}
|
||||
return new Config(answer);
|
||||
}
|
||||
withValue(key, value) {
|
||||
return new Config({
|
||||
...this.a,
|
||||
[key]: value.build(),
|
||||
});
|
||||
}
|
||||
addValue(key, value) {
|
||||
return new Config({
|
||||
...this.a,
|
||||
[key]: value.build(),
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
import * as dntShim from "../_dnt.test_shims.js";
|
||||
import { Config } from "./config.js";
|
||||
import { Value } from "./value.js";
|
||||
import { expect } from "../deps/deno.land/x/expect@v0.2.9/mod.js";
|
||||
const { test } = dntShim.Deno;
|
||||
test("String", () => {
|
||||
const bitcoinPropertiesBuilt = Config.of({
|
||||
"peer-tor-address": Value.string({
|
||||
name: "Peer tor address",
|
||||
default: "",
|
||||
description: "The Tor address of the peer interface",
|
||||
warning: null,
|
||||
nullable: false,
|
||||
masked: true,
|
||||
placeholder: null,
|
||||
pattern: null,
|
||||
"pattern-description": null,
|
||||
textarea: null,
|
||||
}),
|
||||
}).build();
|
||||
expect(JSON.stringify(bitcoinPropertiesBuilt)).toEqual(
|
||||
/*json*/ `{
|
||||
"peer-tor-address": {
|
||||
"type": "string",
|
||||
"name": "Peer tor address",
|
||||
"default": "",
|
||||
"description": "The Tor address of the peer interface",
|
||||
"warning": null,
|
||||
"nullable": false,
|
||||
"masked": true,
|
||||
"placeholder": null,
|
||||
"pattern": null,
|
||||
"pattern-description": null,
|
||||
"textarea": null
|
||||
}}`
|
||||
.replaceAll("\n", " ")
|
||||
.replaceAll(/\s{2,}/g, "")
|
||||
.replaceAll(": ", ":"));
|
||||
});
|
||||
@@ -1,68 +0,0 @@
|
||||
import { IBuilder } from "./builder.js";
|
||||
export class List extends IBuilder {
|
||||
// // deno-lint-ignore ban-types
|
||||
// static boolean<A extends Description & Default<boolean[]> & { range: string; spec: {}; default: boolean }>(a: A) {
|
||||
// return new List({
|
||||
// type: "list" as const,
|
||||
// subtype: "boolean" as const,
|
||||
// ...a,
|
||||
// });
|
||||
// }
|
||||
static string(a) {
|
||||
return new List({
|
||||
type: "list",
|
||||
subtype: "string",
|
||||
...a,
|
||||
});
|
||||
}
|
||||
static number(a) {
|
||||
return new List({
|
||||
type: "list",
|
||||
subtype: "number",
|
||||
...a,
|
||||
});
|
||||
}
|
||||
static enum(a) {
|
||||
return new List({
|
||||
type: "list",
|
||||
subtype: "enum",
|
||||
...a,
|
||||
});
|
||||
}
|
||||
static obj(a) {
|
||||
const { spec: previousSpec, ...rest } = a;
|
||||
const { spec: previousSpecSpec, ...restSpec } = previousSpec;
|
||||
const specSpec = previousSpecSpec.build();
|
||||
const spec = {
|
||||
...restSpec,
|
||||
spec: specSpec,
|
||||
};
|
||||
const value = {
|
||||
spec,
|
||||
...rest,
|
||||
};
|
||||
return new List({
|
||||
type: "list",
|
||||
subtype: "object",
|
||||
...value,
|
||||
});
|
||||
}
|
||||
static union(a) {
|
||||
const { spec: previousSpec, ...rest } = a;
|
||||
const { variants: previousVariants, ...restSpec } = previousSpec;
|
||||
const variants = previousVariants.build();
|
||||
const spec = {
|
||||
...restSpec,
|
||||
variants,
|
||||
};
|
||||
const value = {
|
||||
spec,
|
||||
...rest,
|
||||
};
|
||||
return new List({
|
||||
type: "list",
|
||||
subtype: "union",
|
||||
...value,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
export { Config } from "./config.js";
|
||||
export { List } from "./list.js";
|
||||
export { Value } from "./value.js";
|
||||
export { Variants } from "./variants.js";
|
||||
@@ -1,48 +0,0 @@
|
||||
import { IBuilder } from "./builder.js";
|
||||
export class Value extends IBuilder {
|
||||
static boolean(a) {
|
||||
return new Value({
|
||||
type: "boolean",
|
||||
...a,
|
||||
});
|
||||
}
|
||||
static string(a) {
|
||||
return new Value({
|
||||
type: "string",
|
||||
...a,
|
||||
});
|
||||
}
|
||||
static number(a) {
|
||||
return new Value({
|
||||
type: "number",
|
||||
...a,
|
||||
});
|
||||
}
|
||||
static enum(a) {
|
||||
return new Value({
|
||||
type: "enum",
|
||||
...a,
|
||||
});
|
||||
}
|
||||
static object(a) {
|
||||
const { spec: previousSpec, ...rest } = a;
|
||||
const spec = previousSpec.build();
|
||||
return new Value({
|
||||
type: "object",
|
||||
...rest,
|
||||
spec,
|
||||
});
|
||||
}
|
||||
static union(a) {
|
||||
const { variants: previousVariants, ...rest } = a;
|
||||
const variants = previousVariants.build();
|
||||
return new Value({
|
||||
type: "union",
|
||||
...rest,
|
||||
variants,
|
||||
});
|
||||
}
|
||||
static list(a) {
|
||||
return new Value(a.build());
|
||||
}
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
import { IBuilder } from "./builder.js";
|
||||
export class Variants extends IBuilder {
|
||||
static of(a) {
|
||||
// deno-lint-ignore no-explicit-any
|
||||
const variants = {};
|
||||
for (const key in a) {
|
||||
// deno-lint-ignore no-explicit-any
|
||||
variants[key] = a[key].build();
|
||||
}
|
||||
return new Variants(variants);
|
||||
}
|
||||
static empty() {
|
||||
return Variants.of({});
|
||||
}
|
||||
static withVariant(key, value) {
|
||||
return Variants.empty().withVariant(key, value);
|
||||
}
|
||||
withVariant(key, value) {
|
||||
return new Variants({
|
||||
...this.a,
|
||||
[key]: value.build(),
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,2 +0,0 @@
|
||||
export * as matches from "./deps/deno.land/x/ts_matches@v5.3.0/mod.js";
|
||||
export * as YAML from "./deps/deno.land/std@0.140.0/encoding/yaml.js";
|
||||
@@ -1,14 +0,0 @@
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
// This module is browser compatible.
|
||||
export class DenoStdInternalError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = "DenoStdInternalError";
|
||||
}
|
||||
}
|
||||
/** Make an assertion, if not `true`, then throw. */
|
||||
export function assert(expr, msg = "") {
|
||||
if (!expr) {
|
||||
throw new DenoStdInternalError(msg);
|
||||
}
|
||||
}
|
||||
@@ -1,162 +0,0 @@
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
// This module is browser compatible.
|
||||
/**
|
||||
* An abstraction of multiple Uint8Arrays
|
||||
*/
|
||||
export class BytesList {
|
||||
constructor() {
|
||||
Object.defineProperty(this, "len", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: 0
|
||||
});
|
||||
Object.defineProperty(this, "chunks", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: []
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Total size of bytes
|
||||
*/
|
||||
size() {
|
||||
return this.len;
|
||||
}
|
||||
/**
|
||||
* Push bytes with given offset infos
|
||||
*/
|
||||
add(value, start = 0, end = value.byteLength) {
|
||||
if (value.byteLength === 0 || end - start === 0) {
|
||||
return;
|
||||
}
|
||||
checkRange(start, end, value.byteLength);
|
||||
this.chunks.push({
|
||||
value,
|
||||
end,
|
||||
start,
|
||||
offset: this.len,
|
||||
});
|
||||
this.len += end - start;
|
||||
}
|
||||
/**
|
||||
* Drop head `n` bytes.
|
||||
*/
|
||||
shift(n) {
|
||||
if (n === 0) {
|
||||
return;
|
||||
}
|
||||
if (this.len <= n) {
|
||||
this.chunks = [];
|
||||
this.len = 0;
|
||||
return;
|
||||
}
|
||||
const idx = this.getChunkIndex(n);
|
||||
this.chunks.splice(0, idx);
|
||||
const [chunk] = this.chunks;
|
||||
if (chunk) {
|
||||
const diff = n - chunk.offset;
|
||||
chunk.start += diff;
|
||||
}
|
||||
let offset = 0;
|
||||
for (const chunk of this.chunks) {
|
||||
chunk.offset = offset;
|
||||
offset += chunk.end - chunk.start;
|
||||
}
|
||||
this.len = offset;
|
||||
}
|
||||
/**
|
||||
* Find chunk index in which `pos` locates by binary-search
|
||||
* returns -1 if out of range
|
||||
*/
|
||||
getChunkIndex(pos) {
|
||||
let max = this.chunks.length;
|
||||
let min = 0;
|
||||
while (true) {
|
||||
const i = min + Math.floor((max - min) / 2);
|
||||
if (i < 0 || this.chunks.length <= i) {
|
||||
return -1;
|
||||
}
|
||||
const { offset, start, end } = this.chunks[i];
|
||||
const len = end - start;
|
||||
if (offset <= pos && pos < offset + len) {
|
||||
return i;
|
||||
}
|
||||
else if (offset + len <= pos) {
|
||||
min = i + 1;
|
||||
}
|
||||
else {
|
||||
max = i - 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get indexed byte from chunks
|
||||
*/
|
||||
get(i) {
|
||||
if (i < 0 || this.len <= i) {
|
||||
throw new Error("out of range");
|
||||
}
|
||||
const idx = this.getChunkIndex(i);
|
||||
const { value, offset, start } = this.chunks[idx];
|
||||
return value[start + i - offset];
|
||||
}
|
||||
/**
|
||||
* Iterator of bytes from given position
|
||||
*/
|
||||
*iterator(start = 0) {
|
||||
const startIdx = this.getChunkIndex(start);
|
||||
if (startIdx < 0)
|
||||
return;
|
||||
const first = this.chunks[startIdx];
|
||||
let firstOffset = start - first.offset;
|
||||
for (let i = startIdx; i < this.chunks.length; i++) {
|
||||
const chunk = this.chunks[i];
|
||||
for (let j = chunk.start + firstOffset; j < chunk.end; j++) {
|
||||
yield chunk.value[j];
|
||||
}
|
||||
firstOffset = 0;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns subset of bytes copied
|
||||
*/
|
||||
slice(start, end = this.len) {
|
||||
if (end === start) {
|
||||
return new Uint8Array();
|
||||
}
|
||||
checkRange(start, end, this.len);
|
||||
const result = new Uint8Array(end - start);
|
||||
const startIdx = this.getChunkIndex(start);
|
||||
const endIdx = this.getChunkIndex(end - 1);
|
||||
let written = 0;
|
||||
for (let i = startIdx; i < endIdx; i++) {
|
||||
const chunk = this.chunks[i];
|
||||
const len = chunk.end - chunk.start;
|
||||
result.set(chunk.value.subarray(chunk.start, chunk.end), written);
|
||||
written += len;
|
||||
}
|
||||
const last = this.chunks[endIdx];
|
||||
const rest = end - start - written;
|
||||
result.set(last.value.subarray(last.start, last.start + rest), written);
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* Concatenate chunks into single Uint8Array copied.
|
||||
*/
|
||||
concat() {
|
||||
const result = new Uint8Array(this.len);
|
||||
let sum = 0;
|
||||
for (const { value, start, end } of this.chunks) {
|
||||
result.set(value.subarray(start, end), sum);
|
||||
sum += end - start;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
function checkRange(start, end, len) {
|
||||
if (start < 0 || len < start || end < 0 || len < end || end < start) {
|
||||
throw new Error("invalid range");
|
||||
}
|
||||
}
|
||||
@@ -1,47 +0,0 @@
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
// This module is browser compatible.
|
||||
/** Check whether binary arrays are equal to each other using 8-bit comparisons.
|
||||
* @private
|
||||
* @param a first array to check equality
|
||||
* @param b second array to check equality
|
||||
*/
|
||||
export function equalsNaive(a, b) {
|
||||
if (a.length !== b.length)
|
||||
return false;
|
||||
for (let i = 0; i < b.length; i++) {
|
||||
if (a[i] !== b[i])
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/** Check whether binary arrays are equal to each other using 32-bit comparisons.
|
||||
* @private
|
||||
* @param a first array to check equality
|
||||
* @param b second array to check equality
|
||||
*/
|
||||
export function equalsSimd(a, b) {
|
||||
if (a.length !== b.length)
|
||||
return false;
|
||||
const len = a.length;
|
||||
const compressable = Math.floor(len / 4);
|
||||
const compressedA = new Uint32Array(a.buffer, 0, compressable);
|
||||
const compressedB = new Uint32Array(b.buffer, 0, compressable);
|
||||
for (let i = compressable * 4; i < len; i++) {
|
||||
if (a[i] !== b[i])
|
||||
return false;
|
||||
}
|
||||
for (let i = 0; i < compressedA.length; i++) {
|
||||
if (compressedA[i] !== compressedB[i])
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/** Check whether binary arrays are equal to each other.
|
||||
* @param a first array to check equality
|
||||
* @param b second array to check equality
|
||||
*/
|
||||
export function equals(a, b) {
|
||||
if (a.length < 1000)
|
||||
return equalsNaive(a, b);
|
||||
return equalsSimd(a, b);
|
||||
}
|
||||
@@ -1,241 +0,0 @@
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
// This module is browser compatible.
|
||||
/**
|
||||
* Provides helper functions to manipulate `Uint8Array` byte slices that are not
|
||||
* included on the `Uint8Array` prototype.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
/** Returns the index of the first occurrence of the needle array in the source
|
||||
* array, or -1 if it is not present.
|
||||
*
|
||||
* A start index can be specified as the third argument that begins the search
|
||||
* at that given index. The start index defaults to the start of the array.
|
||||
*
|
||||
* The complexity of this function is O(source.lenth * needle.length).
|
||||
*
|
||||
* ```ts
|
||||
* import { indexOfNeedle } from "./mod.ts";
|
||||
* const source = new Uint8Array([0, 1, 2, 1, 2, 1, 2, 3]);
|
||||
* const needle = new Uint8Array([1, 2]);
|
||||
* console.log(indexOfNeedle(source, needle)); // 1
|
||||
* console.log(indexOfNeedle(source, needle, 2)); // 3
|
||||
* ```
|
||||
*/
|
||||
export function indexOfNeedle(source, needle, start = 0) {
|
||||
if (start >= source.length) {
|
||||
return -1;
|
||||
}
|
||||
if (start < 0) {
|
||||
start = Math.max(0, source.length + start);
|
||||
}
|
||||
const s = needle[0];
|
||||
for (let i = start; i < source.length; i++) {
|
||||
if (source[i] !== s)
|
||||
continue;
|
||||
const pin = i;
|
||||
let matched = 1;
|
||||
let j = i;
|
||||
while (matched < needle.length) {
|
||||
j++;
|
||||
if (source[j] !== needle[j - pin]) {
|
||||
break;
|
||||
}
|
||||
matched++;
|
||||
}
|
||||
if (matched === needle.length) {
|
||||
return pin;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
/** Returns the index of the last occurrence of the needle array in the source
|
||||
* array, or -1 if it is not present.
|
||||
*
|
||||
* A start index can be specified as the third argument that begins the search
|
||||
* at that given index. The start index defaults to the end of the array.
|
||||
*
|
||||
* The complexity of this function is O(source.lenth * needle.length).
|
||||
*
|
||||
* ```ts
|
||||
* import { lastIndexOfNeedle } from "./mod.ts";
|
||||
* const source = new Uint8Array([0, 1, 2, 1, 2, 1, 2, 3]);
|
||||
* const needle = new Uint8Array([1, 2]);
|
||||
* console.log(lastIndexOfNeedle(source, needle)); // 5
|
||||
* console.log(lastIndexOfNeedle(source, needle, 4)); // 3
|
||||
* ```
|
||||
*/
|
||||
export function lastIndexOfNeedle(source, needle, start = source.length - 1) {
|
||||
if (start < 0) {
|
||||
return -1;
|
||||
}
|
||||
if (start >= source.length) {
|
||||
start = source.length - 1;
|
||||
}
|
||||
const e = needle[needle.length - 1];
|
||||
for (let i = start; i >= 0; i--) {
|
||||
if (source[i] !== e)
|
||||
continue;
|
||||
const pin = i;
|
||||
let matched = 1;
|
||||
let j = i;
|
||||
while (matched < needle.length) {
|
||||
j--;
|
||||
if (source[j] !== needle[needle.length - 1 - (pin - j)]) {
|
||||
break;
|
||||
}
|
||||
matched++;
|
||||
}
|
||||
if (matched === needle.length) {
|
||||
return pin - needle.length + 1;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
/** Returns true if the prefix array appears at the start of the source array,
|
||||
* false otherwise.
|
||||
*
|
||||
* The complexity of this function is O(prefix.length).
|
||||
*
|
||||
* ```ts
|
||||
* import { startsWith } from "./mod.ts";
|
||||
* const source = new Uint8Array([0, 1, 2, 1, 2, 1, 2, 3]);
|
||||
* const prefix = new Uint8Array([0, 1, 2]);
|
||||
* console.log(startsWith(source, prefix)); // true
|
||||
* ```
|
||||
*/
|
||||
export function startsWith(source, prefix) {
|
||||
for (let i = 0, max = prefix.length; i < max; i++) {
|
||||
if (source[i] !== prefix[i])
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/** Returns true if the suffix array appears at the end of the source array,
|
||||
* false otherwise.
|
||||
*
|
||||
* The complexity of this function is O(suffix.length).
|
||||
*
|
||||
* ```ts
|
||||
* import { endsWith } from "./mod.ts";
|
||||
* const source = new Uint8Array([0, 1, 2, 1, 2, 1, 2, 3]);
|
||||
* const suffix = new Uint8Array([1, 2, 3]);
|
||||
* console.log(endsWith(source, suffix)); // true
|
||||
* ```
|
||||
*/
|
||||
export function endsWith(source, suffix) {
|
||||
for (let srci = source.length - 1, sfxi = suffix.length - 1; sfxi >= 0; srci--, sfxi--) {
|
||||
if (source[srci] !== suffix[sfxi])
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/** Returns a new Uint8Array composed of `count` repetitions of the `source`
|
||||
* array.
|
||||
*
|
||||
* If `count` is negative, a `RangeError` is thrown.
|
||||
*
|
||||
* ```ts
|
||||
* import { repeat } from "./mod.ts";
|
||||
* const source = new Uint8Array([0, 1, 2]);
|
||||
* console.log(repeat(source, 3)); // [0, 1, 2, 0, 1, 2, 0, 1, 2]
|
||||
* console.log(repeat(source, 0)); // []
|
||||
* console.log(repeat(source, -1)); // RangeError
|
||||
* ```
|
||||
*/
|
||||
export function repeat(source, count) {
|
||||
if (count === 0) {
|
||||
return new Uint8Array();
|
||||
}
|
||||
if (count < 0) {
|
||||
throw new RangeError("bytes: negative repeat count");
|
||||
}
|
||||
else if ((source.length * count) / count !== source.length) {
|
||||
throw new Error("bytes: repeat count causes overflow");
|
||||
}
|
||||
const int = Math.floor(count);
|
||||
if (int !== count) {
|
||||
throw new Error("bytes: repeat count must be an integer");
|
||||
}
|
||||
const nb = new Uint8Array(source.length * count);
|
||||
let bp = copy(source, nb);
|
||||
for (; bp < nb.length; bp *= 2) {
|
||||
copy(nb.slice(0, bp), nb, bp);
|
||||
}
|
||||
return nb;
|
||||
}
|
||||
/** Concatenate the given arrays into a new Uint8Array.
|
||||
*
|
||||
* ```ts
|
||||
* import { concat } from "./mod.ts";
|
||||
* const a = new Uint8Array([0, 1, 2]);
|
||||
* const b = new Uint8Array([3, 4, 5]);
|
||||
* console.log(concat(a, b)); // [0, 1, 2, 3, 4, 5]
|
||||
*/
|
||||
export function concat(...buf) {
|
||||
let length = 0;
|
||||
for (const b of buf) {
|
||||
length += b.length;
|
||||
}
|
||||
const output = new Uint8Array(length);
|
||||
let index = 0;
|
||||
for (const b of buf) {
|
||||
output.set(b, index);
|
||||
index += b.length;
|
||||
}
|
||||
return output;
|
||||
}
|
||||
/** Returns true if the source array contains the needle array, false otherwise.
|
||||
*
|
||||
* A start index can be specified as the third argument that begins the search
|
||||
* at that given index. The start index defaults to the beginning of the array.
|
||||
*
|
||||
* The complexity of this function is O(source.length * needle.length).
|
||||
*
|
||||
* ```ts
|
||||
* import { includesNeedle } from "./mod.ts";
|
||||
* const source = new Uint8Array([0, 1, 2, 1, 2, 1, 2, 3]);
|
||||
* const needle = new Uint8Array([1, 2]);
|
||||
* console.log(includesNeedle(source, needle)); // true
|
||||
* console.log(includesNeedle(source, needle, 6)); // false
|
||||
* ```
|
||||
*/
|
||||
export function includesNeedle(source, needle, start = 0) {
|
||||
return indexOfNeedle(source, needle, start) !== -1;
|
||||
}
|
||||
/** Copy bytes from the `src` array to the `dst` array. Returns the number of
|
||||
* bytes copied.
|
||||
*
|
||||
* If the `src` array is larger than what the `dst` array can hold, only the
|
||||
* amount of bytes that fit in the `dst` array are copied.
|
||||
*
|
||||
* An offset can be specified as the third argument that begins the copy at
|
||||
* that given index in the `dst` array. The offset defaults to the beginning of
|
||||
* the array.
|
||||
*
|
||||
* ```ts
|
||||
* import { copy } from "./mod.ts";
|
||||
* const src = new Uint8Array([9, 8, 7]);
|
||||
* const dst = new Uint8Array([0, 1, 2, 3, 4, 5]);
|
||||
* console.log(copy(src, dst)); // 3
|
||||
* console.log(dst); // [9, 8, 7, 3, 4, 5]
|
||||
* ```
|
||||
*
|
||||
* ```ts
|
||||
* import { copy } from "./mod.ts";
|
||||
* const src = new Uint8Array([1, 1, 1, 1]);
|
||||
* const dst = new Uint8Array([0, 0, 0, 0]);
|
||||
* console.log(copy(src, dst, 1)); // 3
|
||||
* console.log(dst); // [0, 1, 1, 1]
|
||||
* ```
|
||||
*/
|
||||
export function copy(src, dst, off = 0) {
|
||||
off = Math.max(0, Math.min(off, dst.byteLength));
|
||||
const dstBytesAvailable = dst.byteLength - off;
|
||||
if (src.byteLength > dstBytesAvailable) {
|
||||
src = src.subarray(0, dstBytesAvailable);
|
||||
}
|
||||
dst.set(src, off);
|
||||
return src.byteLength;
|
||||
}
|
||||
export { equals } from "./equals.js";
|
||||
@@ -1,684 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { YAMLError } from "../error.js";
|
||||
import * as common from "../utils.js";
|
||||
import { DumperState } from "./dumper_state.js";
|
||||
const _toString = Object.prototype.toString;
|
||||
const { hasOwn } = Object;
|
||||
const CHAR_TAB = 0x09; /* Tab */
|
||||
const CHAR_LINE_FEED = 0x0a; /* LF */
|
||||
const CHAR_SPACE = 0x20; /* Space */
|
||||
const CHAR_EXCLAMATION = 0x21; /* ! */
|
||||
const CHAR_DOUBLE_QUOTE = 0x22; /* " */
|
||||
const CHAR_SHARP = 0x23; /* # */
|
||||
const CHAR_PERCENT = 0x25; /* % */
|
||||
const CHAR_AMPERSAND = 0x26; /* & */
|
||||
const CHAR_SINGLE_QUOTE = 0x27; /* ' */
|
||||
const CHAR_ASTERISK = 0x2a; /* * */
|
||||
const CHAR_COMMA = 0x2c; /* , */
|
||||
const CHAR_MINUS = 0x2d; /* - */
|
||||
const CHAR_COLON = 0x3a; /* : */
|
||||
const CHAR_GREATER_THAN = 0x3e; /* > */
|
||||
const CHAR_QUESTION = 0x3f; /* ? */
|
||||
const CHAR_COMMERCIAL_AT = 0x40; /* @ */
|
||||
const CHAR_LEFT_SQUARE_BRACKET = 0x5b; /* [ */
|
||||
const CHAR_RIGHT_SQUARE_BRACKET = 0x5d; /* ] */
|
||||
const CHAR_GRAVE_ACCENT = 0x60; /* ` */
|
||||
const CHAR_LEFT_CURLY_BRACKET = 0x7b; /* { */
|
||||
const CHAR_VERTICAL_LINE = 0x7c; /* | */
|
||||
const CHAR_RIGHT_CURLY_BRACKET = 0x7d; /* } */
|
||||
const ESCAPE_SEQUENCES = {};
|
||||
ESCAPE_SEQUENCES[0x00] = "\\0";
|
||||
ESCAPE_SEQUENCES[0x07] = "\\a";
|
||||
ESCAPE_SEQUENCES[0x08] = "\\b";
|
||||
ESCAPE_SEQUENCES[0x09] = "\\t";
|
||||
ESCAPE_SEQUENCES[0x0a] = "\\n";
|
||||
ESCAPE_SEQUENCES[0x0b] = "\\v";
|
||||
ESCAPE_SEQUENCES[0x0c] = "\\f";
|
||||
ESCAPE_SEQUENCES[0x0d] = "\\r";
|
||||
ESCAPE_SEQUENCES[0x1b] = "\\e";
|
||||
ESCAPE_SEQUENCES[0x22] = '\\"';
|
||||
ESCAPE_SEQUENCES[0x5c] = "\\\\";
|
||||
ESCAPE_SEQUENCES[0x85] = "\\N";
|
||||
ESCAPE_SEQUENCES[0xa0] = "\\_";
|
||||
ESCAPE_SEQUENCES[0x2028] = "\\L";
|
||||
ESCAPE_SEQUENCES[0x2029] = "\\P";
|
||||
const DEPRECATED_BOOLEANS_SYNTAX = [
|
||||
"y",
|
||||
"Y",
|
||||
"yes",
|
||||
"Yes",
|
||||
"YES",
|
||||
"on",
|
||||
"On",
|
||||
"ON",
|
||||
"n",
|
||||
"N",
|
||||
"no",
|
||||
"No",
|
||||
"NO",
|
||||
"off",
|
||||
"Off",
|
||||
"OFF",
|
||||
];
|
||||
function encodeHex(character) {
|
||||
const string = character.toString(16).toUpperCase();
|
||||
let handle;
|
||||
let length;
|
||||
if (character <= 0xff) {
|
||||
handle = "x";
|
||||
length = 2;
|
||||
}
|
||||
else if (character <= 0xffff) {
|
||||
handle = "u";
|
||||
length = 4;
|
||||
}
|
||||
else if (character <= 0xffffffff) {
|
||||
handle = "U";
|
||||
length = 8;
|
||||
}
|
||||
else {
|
||||
throw new YAMLError("code point within a string may not be greater than 0xFFFFFFFF");
|
||||
}
|
||||
return `\\${handle}${common.repeat("0", length - string.length)}${string}`;
|
||||
}
|
||||
// Indents every line in a string. Empty lines (\n only) are not indented.
|
||||
function indentString(string, spaces) {
|
||||
const ind = common.repeat(" ", spaces), length = string.length;
|
||||
let position = 0, next = -1, result = "", line;
|
||||
while (position < length) {
|
||||
next = string.indexOf("\n", position);
|
||||
if (next === -1) {
|
||||
line = string.slice(position);
|
||||
position = length;
|
||||
}
|
||||
else {
|
||||
line = string.slice(position, next + 1);
|
||||
position = next + 1;
|
||||
}
|
||||
if (line.length && line !== "\n")
|
||||
result += ind;
|
||||
result += line;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function generateNextLine(state, level) {
|
||||
return `\n${common.repeat(" ", state.indent * level)}`;
|
||||
}
|
||||
function testImplicitResolving(state, str) {
|
||||
let type;
|
||||
for (let index = 0, length = state.implicitTypes.length; index < length; index += 1) {
|
||||
type = state.implicitTypes[index];
|
||||
if (type.resolve(str)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
// [33] s-white ::= s-space | s-tab
|
||||
function isWhitespace(c) {
|
||||
return c === CHAR_SPACE || c === CHAR_TAB;
|
||||
}
|
||||
// Returns true if the character can be printed without escaping.
|
||||
// From YAML 1.2: "any allowed characters known to be non-printable
|
||||
// should also be escaped. [However,] This isn’t mandatory"
|
||||
// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029.
|
||||
function isPrintable(c) {
|
||||
return ((0x00020 <= c && c <= 0x00007e) ||
|
||||
(0x000a1 <= c && c <= 0x00d7ff && c !== 0x2028 && c !== 0x2029) ||
|
||||
(0x0e000 <= c && c <= 0x00fffd && c !== 0xfeff) /* BOM */ ||
|
||||
(0x10000 <= c && c <= 0x10ffff));
|
||||
}
|
||||
// Simplified test for values allowed after the first character in plain style.
|
||||
function isPlainSafe(c) {
|
||||
// Uses a subset of nb-char - c-flow-indicator - ":" - "#"
|
||||
// where nb-char ::= c-printable - b-char - c-byte-order-mark.
|
||||
return (isPrintable(c) &&
|
||||
c !== 0xfeff &&
|
||||
// - c-flow-indicator
|
||||
c !== CHAR_COMMA &&
|
||||
c !== CHAR_LEFT_SQUARE_BRACKET &&
|
||||
c !== CHAR_RIGHT_SQUARE_BRACKET &&
|
||||
c !== CHAR_LEFT_CURLY_BRACKET &&
|
||||
c !== CHAR_RIGHT_CURLY_BRACKET &&
|
||||
// - ":" - "#"
|
||||
c !== CHAR_COLON &&
|
||||
c !== CHAR_SHARP);
|
||||
}
|
||||
// Simplified test for values allowed as the first character in plain style.
|
||||
function isPlainSafeFirst(c) {
|
||||
// Uses a subset of ns-char - c-indicator
|
||||
// where ns-char = nb-char - s-white.
|
||||
return (isPrintable(c) &&
|
||||
c !== 0xfeff &&
|
||||
!isWhitespace(c) && // - s-white
|
||||
// - (c-indicator ::=
|
||||
// “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}”
|
||||
c !== CHAR_MINUS &&
|
||||
c !== CHAR_QUESTION &&
|
||||
c !== CHAR_COLON &&
|
||||
c !== CHAR_COMMA &&
|
||||
c !== CHAR_LEFT_SQUARE_BRACKET &&
|
||||
c !== CHAR_RIGHT_SQUARE_BRACKET &&
|
||||
c !== CHAR_LEFT_CURLY_BRACKET &&
|
||||
c !== CHAR_RIGHT_CURLY_BRACKET &&
|
||||
// | “#” | “&” | “*” | “!” | “|” | “>” | “'” | “"”
|
||||
c !== CHAR_SHARP &&
|
||||
c !== CHAR_AMPERSAND &&
|
||||
c !== CHAR_ASTERISK &&
|
||||
c !== CHAR_EXCLAMATION &&
|
||||
c !== CHAR_VERTICAL_LINE &&
|
||||
c !== CHAR_GREATER_THAN &&
|
||||
c !== CHAR_SINGLE_QUOTE &&
|
||||
c !== CHAR_DOUBLE_QUOTE &&
|
||||
// | “%” | “@” | “`”)
|
||||
c !== CHAR_PERCENT &&
|
||||
c !== CHAR_COMMERCIAL_AT &&
|
||||
c !== CHAR_GRAVE_ACCENT);
|
||||
}
|
||||
// Determines whether block indentation indicator is required.
|
||||
function needIndentIndicator(string) {
|
||||
const leadingSpaceRe = /^\n* /;
|
||||
return leadingSpaceRe.test(string);
|
||||
}
|
||||
const STYLE_PLAIN = 1, STYLE_SINGLE = 2, STYLE_LITERAL = 3, STYLE_FOLDED = 4, STYLE_DOUBLE = 5;
|
||||
// Determines which scalar styles are possible and returns the preferred style.
|
||||
// lineWidth = -1 => no limit.
|
||||
// Pre-conditions: str.length > 0.
|
||||
// Post-conditions:
|
||||
// STYLE_PLAIN or STYLE_SINGLE => no \n are in the string.
|
||||
// STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1).
|
||||
// STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1).
|
||||
function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType) {
|
||||
const shouldTrackWidth = lineWidth !== -1;
|
||||
let hasLineBreak = false, hasFoldableLine = false, // only checked if shouldTrackWidth
|
||||
previousLineBreak = -1, // count the first line correctly
|
||||
plain = isPlainSafeFirst(string.charCodeAt(0)) &&
|
||||
!isWhitespace(string.charCodeAt(string.length - 1));
|
||||
let char, i;
|
||||
if (singleLineOnly) {
|
||||
// Case: no block styles.
|
||||
// Check for disallowed characters to rule out plain and single.
|
||||
for (i = 0; i < string.length; i++) {
|
||||
char = string.charCodeAt(i);
|
||||
if (!isPrintable(char)) {
|
||||
return STYLE_DOUBLE;
|
||||
}
|
||||
plain = plain && isPlainSafe(char);
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Case: block styles permitted.
|
||||
for (i = 0; i < string.length; i++) {
|
||||
char = string.charCodeAt(i);
|
||||
if (char === CHAR_LINE_FEED) {
|
||||
hasLineBreak = true;
|
||||
// Check if any line can be folded.
|
||||
if (shouldTrackWidth) {
|
||||
hasFoldableLine = hasFoldableLine ||
|
||||
// Foldable line = too long, and not more-indented.
|
||||
(i - previousLineBreak - 1 > lineWidth &&
|
||||
string[previousLineBreak + 1] !== " ");
|
||||
previousLineBreak = i;
|
||||
}
|
||||
}
|
||||
else if (!isPrintable(char)) {
|
||||
return STYLE_DOUBLE;
|
||||
}
|
||||
plain = plain && isPlainSafe(char);
|
||||
}
|
||||
// in case the end is missing a \n
|
||||
hasFoldableLine = hasFoldableLine ||
|
||||
(shouldTrackWidth &&
|
||||
i - previousLineBreak - 1 > lineWidth &&
|
||||
string[previousLineBreak + 1] !== " ");
|
||||
}
|
||||
// Although every style can represent \n without escaping, prefer block styles
|
||||
// for multiline, since they're more readable and they don't add empty lines.
|
||||
// Also prefer folding a super-long line.
|
||||
if (!hasLineBreak && !hasFoldableLine) {
|
||||
// Strings interpretable as another type have to be quoted;
|
||||
// e.g. the string 'true' vs. the boolean true.
|
||||
return plain && !testAmbiguousType(string) ? STYLE_PLAIN : STYLE_SINGLE;
|
||||
}
|
||||
// Edge case: block indentation indicator can only have one digit.
|
||||
if (indentPerLevel > 9 && needIndentIndicator(string)) {
|
||||
return STYLE_DOUBLE;
|
||||
}
|
||||
// At this point we know block styles are valid.
|
||||
// Prefer literal style unless we want to fold.
|
||||
return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL;
|
||||
}
|
||||
// Greedy line breaking.
|
||||
// Picks the longest line under the limit each time,
|
||||
// otherwise settles for the shortest line over the limit.
|
||||
// NB. More-indented lines *cannot* be folded, as that would add an extra \n.
|
||||
function foldLine(line, width) {
|
||||
if (line === "" || line[0] === " ")
|
||||
return line;
|
||||
// Since a more-indented line adds a \n, breaks can't be followed by a space.
|
||||
const breakRe = / [^ ]/g; // note: the match index will always be <= length-2.
|
||||
let match;
|
||||
// start is an inclusive index. end, curr, and next are exclusive.
|
||||
let start = 0, end, curr = 0, next = 0;
|
||||
let result = "";
|
||||
// Invariants: 0 <= start <= length-1.
|
||||
// 0 <= curr <= next <= max(0, length-2). curr - start <= width.
|
||||
// Inside the loop:
|
||||
// A match implies length >= 2, so curr and next are <= length-2.
|
||||
// tslint:disable-next-line:no-conditional-assignment
|
||||
while ((match = breakRe.exec(line))) {
|
||||
next = match.index;
|
||||
// maintain invariant: curr - start <= width
|
||||
if (next - start > width) {
|
||||
end = curr > start ? curr : next; // derive end <= length-2
|
||||
result += `\n${line.slice(start, end)}`;
|
||||
// skip the space that was output as \n
|
||||
start = end + 1; // derive start <= length-1
|
||||
}
|
||||
curr = next;
|
||||
}
|
||||
// By the invariants, start <= length-1, so there is something left over.
|
||||
// It is either the whole string or a part starting from non-whitespace.
|
||||
result += "\n";
|
||||
// Insert a break if the remainder is too long and there is a break available.
|
||||
if (line.length - start > width && curr > start) {
|
||||
result += `${line.slice(start, curr)}\n${line.slice(curr + 1)}`;
|
||||
}
|
||||
else {
|
||||
result += line.slice(start);
|
||||
}
|
||||
return result.slice(1); // drop extra \n joiner
|
||||
}
|
||||
// (See the note for writeScalar.)
|
||||
function dropEndingNewline(string) {
|
||||
return string[string.length - 1] === "\n" ? string.slice(0, -1) : string;
|
||||
}
|
||||
// Note: a long line without a suitable break point will exceed the width limit.
|
||||
// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0.
|
||||
function foldString(string, width) {
|
||||
// In folded style, $k$ consecutive newlines output as $k+1$ newlines—
|
||||
// unless they're before or after a more-indented line, or at the very
|
||||
// beginning or end, in which case $k$ maps to $k$.
|
||||
// Therefore, parse each chunk as newline(s) followed by a content line.
|
||||
const lineRe = /(\n+)([^\n]*)/g;
|
||||
// first line (possibly an empty line)
|
||||
let result = (() => {
|
||||
let nextLF = string.indexOf("\n");
|
||||
nextLF = nextLF !== -1 ? nextLF : string.length;
|
||||
lineRe.lastIndex = nextLF;
|
||||
return foldLine(string.slice(0, nextLF), width);
|
||||
})();
|
||||
// If we haven't reached the first content line yet, don't add an extra \n.
|
||||
let prevMoreIndented = string[0] === "\n" || string[0] === " ";
|
||||
let moreIndented;
|
||||
// rest of the lines
|
||||
let match;
|
||||
// tslint:disable-next-line:no-conditional-assignment
|
||||
while ((match = lineRe.exec(string))) {
|
||||
const prefix = match[1], line = match[2];
|
||||
moreIndented = line[0] === " ";
|
||||
result += prefix +
|
||||
(!prevMoreIndented && !moreIndented && line !== "" ? "\n" : "") +
|
||||
foldLine(line, width);
|
||||
prevMoreIndented = moreIndented;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
// Escapes a double-quoted string.
|
||||
function escapeString(string) {
|
||||
let result = "";
|
||||
let char, nextChar;
|
||||
let escapeSeq;
|
||||
for (let i = 0; i < string.length; i++) {
|
||||
char = string.charCodeAt(i);
|
||||
// Check for surrogate pairs (reference Unicode 3.0 section "3.7 Surrogates").
|
||||
if (char >= 0xd800 && char <= 0xdbff /* high surrogate */) {
|
||||
nextChar = string.charCodeAt(i + 1);
|
||||
if (nextChar >= 0xdc00 && nextChar <= 0xdfff /* low surrogate */) {
|
||||
// Combine the surrogate pair and store it escaped.
|
||||
result += encodeHex((char - 0xd800) * 0x400 + nextChar - 0xdc00 + 0x10000);
|
||||
// Advance index one extra since we already used that char here.
|
||||
i++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
escapeSeq = ESCAPE_SEQUENCES[char];
|
||||
result += !escapeSeq && isPrintable(char)
|
||||
? string[i]
|
||||
: escapeSeq || encodeHex(char);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9.
|
||||
function blockHeader(string, indentPerLevel) {
|
||||
const indentIndicator = needIndentIndicator(string)
|
||||
? String(indentPerLevel)
|
||||
: "";
|
||||
// note the special case: the string '\n' counts as a "trailing" empty line.
|
||||
const clip = string[string.length - 1] === "\n";
|
||||
const keep = clip && (string[string.length - 2] === "\n" || string === "\n");
|
||||
const chomp = keep ? "+" : clip ? "" : "-";
|
||||
return `${indentIndicator}${chomp}\n`;
|
||||
}
|
||||
// Note: line breaking/folding is implemented for only the folded style.
|
||||
// NB. We drop the last trailing newline (if any) of a returned block scalar
|
||||
// since the dumper adds its own newline. This always works:
|
||||
// • No ending newline => unaffected; already using strip "-" chomping.
|
||||
// • Ending newline => removed then restored.
|
||||
// Importantly, this keeps the "+" chomp indicator from gaining an extra line.
|
||||
function writeScalar(state, string, level, iskey) {
|
||||
state.dump = (() => {
|
||||
if (string.length === 0) {
|
||||
return "''";
|
||||
}
|
||||
if (!state.noCompatMode &&
|
||||
DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1) {
|
||||
return `'${string}'`;
|
||||
}
|
||||
const indent = state.indent * Math.max(1, level); // no 0-indent scalars
|
||||
// As indentation gets deeper, let the width decrease monotonically
|
||||
// to the lower bound min(state.lineWidth, 40).
|
||||
// Note that this implies
|
||||
// state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound.
|
||||
// state.lineWidth > 40 + state.indent: width decreases until the lower
|
||||
// bound.
|
||||
// This behaves better than a constant minimum width which disallows
|
||||
// narrower options, or an indent threshold which causes the width
|
||||
// to suddenly increase.
|
||||
const lineWidth = state.lineWidth === -1
|
||||
? -1
|
||||
: Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent);
|
||||
// Without knowing if keys are implicit/explicit,
|
||||
// assume implicit for safety.
|
||||
const singleLineOnly = iskey ||
|
||||
// No block styles in flow mode.
|
||||
(state.flowLevel > -1 && level >= state.flowLevel);
|
||||
function testAmbiguity(str) {
|
||||
return testImplicitResolving(state, str);
|
||||
}
|
||||
switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, testAmbiguity)) {
|
||||
case STYLE_PLAIN:
|
||||
return string;
|
||||
case STYLE_SINGLE:
|
||||
return `'${string.replace(/'/g, "''")}'`;
|
||||
case STYLE_LITERAL:
|
||||
return `|${blockHeader(string, state.indent)}${dropEndingNewline(indentString(string, indent))}`;
|
||||
case STYLE_FOLDED:
|
||||
return `>${blockHeader(string, state.indent)}${dropEndingNewline(indentString(foldString(string, lineWidth), indent))}`;
|
||||
case STYLE_DOUBLE:
|
||||
return `"${escapeString(string)}"`;
|
||||
default:
|
||||
throw new YAMLError("impossible error: invalid scalar style");
|
||||
}
|
||||
})();
|
||||
}
|
||||
function writeFlowSequence(state, level, object) {
|
||||
let _result = "";
|
||||
const _tag = state.tag;
|
||||
for (let index = 0, length = object.length; index < length; index += 1) {
|
||||
// Write only valid elements.
|
||||
if (writeNode(state, level, object[index], false, false)) {
|
||||
if (index !== 0)
|
||||
_result += `,${!state.condenseFlow ? " " : ""}`;
|
||||
_result += state.dump;
|
||||
}
|
||||
}
|
||||
state.tag = _tag;
|
||||
state.dump = `[${_result}]`;
|
||||
}
|
||||
function writeBlockSequence(state, level, object, compact = false) {
|
||||
let _result = "";
|
||||
const _tag = state.tag;
|
||||
for (let index = 0, length = object.length; index < length; index += 1) {
|
||||
// Write only valid elements.
|
||||
if (writeNode(state, level + 1, object[index], true, true)) {
|
||||
if (!compact || index !== 0) {
|
||||
_result += generateNextLine(state, level);
|
||||
}
|
||||
if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
|
||||
_result += "-";
|
||||
}
|
||||
else {
|
||||
_result += "- ";
|
||||
}
|
||||
_result += state.dump;
|
||||
}
|
||||
}
|
||||
state.tag = _tag;
|
||||
state.dump = _result || "[]"; // Empty sequence if no valid values.
|
||||
}
|
||||
function writeFlowMapping(state, level, object) {
|
||||
let _result = "";
|
||||
const _tag = state.tag, objectKeyList = Object.keys(object);
|
||||
let pairBuffer, objectKey, objectValue;
|
||||
for (let index = 0, length = objectKeyList.length; index < length; index += 1) {
|
||||
pairBuffer = state.condenseFlow ? '"' : "";
|
||||
if (index !== 0)
|
||||
pairBuffer += ", ";
|
||||
objectKey = objectKeyList[index];
|
||||
objectValue = object[objectKey];
|
||||
if (!writeNode(state, level, objectKey, false, false)) {
|
||||
continue; // Skip this pair because of invalid key;
|
||||
}
|
||||
if (state.dump.length > 1024)
|
||||
pairBuffer += "? ";
|
||||
pairBuffer += `${state.dump}${state.condenseFlow ? '"' : ""}:${state.condenseFlow ? "" : " "}`;
|
||||
if (!writeNode(state, level, objectValue, false, false)) {
|
||||
continue; // Skip this pair because of invalid value.
|
||||
}
|
||||
pairBuffer += state.dump;
|
||||
// Both key and value are valid.
|
||||
_result += pairBuffer;
|
||||
}
|
||||
state.tag = _tag;
|
||||
state.dump = `{${_result}}`;
|
||||
}
|
||||
function writeBlockMapping(state, level, object, compact = false) {
|
||||
const _tag = state.tag, objectKeyList = Object.keys(object);
|
||||
let _result = "";
|
||||
// Allow sorting keys so that the output file is deterministic
|
||||
if (state.sortKeys === true) {
|
||||
// Default sorting
|
||||
objectKeyList.sort();
|
||||
}
|
||||
else if (typeof state.sortKeys === "function") {
|
||||
// Custom sort function
|
||||
objectKeyList.sort(state.sortKeys);
|
||||
}
|
||||
else if (state.sortKeys) {
|
||||
// Something is wrong
|
||||
throw new YAMLError("sortKeys must be a boolean or a function");
|
||||
}
|
||||
let pairBuffer = "", objectKey, objectValue, explicitPair;
|
||||
for (let index = 0, length = objectKeyList.length; index < length; index += 1) {
|
||||
pairBuffer = "";
|
||||
if (!compact || index !== 0) {
|
||||
pairBuffer += generateNextLine(state, level);
|
||||
}
|
||||
objectKey = objectKeyList[index];
|
||||
objectValue = object[objectKey];
|
||||
if (!writeNode(state, level + 1, objectKey, true, true, true)) {
|
||||
continue; // Skip this pair because of invalid key.
|
||||
}
|
||||
explicitPair = (state.tag !== null && state.tag !== "?") ||
|
||||
(state.dump && state.dump.length > 1024);
|
||||
if (explicitPair) {
|
||||
if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
|
||||
pairBuffer += "?";
|
||||
}
|
||||
else {
|
||||
pairBuffer += "? ";
|
||||
}
|
||||
}
|
||||
pairBuffer += state.dump;
|
||||
if (explicitPair) {
|
||||
pairBuffer += generateNextLine(state, level);
|
||||
}
|
||||
if (!writeNode(state, level + 1, objectValue, true, explicitPair)) {
|
||||
continue; // Skip this pair because of invalid value.
|
||||
}
|
||||
if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
|
||||
pairBuffer += ":";
|
||||
}
|
||||
else {
|
||||
pairBuffer += ": ";
|
||||
}
|
||||
pairBuffer += state.dump;
|
||||
// Both key and value are valid.
|
||||
_result += pairBuffer;
|
||||
}
|
||||
state.tag = _tag;
|
||||
state.dump = _result || "{}"; // Empty mapping if no valid pairs.
|
||||
}
|
||||
function detectType(state, object, explicit = false) {
|
||||
const typeList = explicit ? state.explicitTypes : state.implicitTypes;
|
||||
let type;
|
||||
let style;
|
||||
let _result;
|
||||
for (let index = 0, length = typeList.length; index < length; index += 1) {
|
||||
type = typeList[index];
|
||||
if ((type.instanceOf || type.predicate) &&
|
||||
(!type.instanceOf ||
|
||||
(typeof object === "object" && object instanceof type.instanceOf)) &&
|
||||
(!type.predicate || type.predicate(object))) {
|
||||
state.tag = explicit ? type.tag : "?";
|
||||
if (type.represent) {
|
||||
style = state.styleMap[type.tag] || type.defaultStyle;
|
||||
if (_toString.call(type.represent) === "[object Function]") {
|
||||
_result = type.represent(object, style);
|
||||
}
|
||||
else if (hasOwn(type.represent, style)) {
|
||||
_result = type.represent[style](object, style);
|
||||
}
|
||||
else {
|
||||
throw new YAMLError(`!<${type.tag}> tag resolver accepts not "${style}" style`);
|
||||
}
|
||||
state.dump = _result;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
// Serializes `object` and writes it to global `result`.
|
||||
// Returns true on success, or false on invalid object.
|
||||
//
|
||||
function writeNode(state, level, object, block, compact, iskey = false) {
|
||||
state.tag = null;
|
||||
state.dump = object;
|
||||
if (!detectType(state, object, false)) {
|
||||
detectType(state, object, true);
|
||||
}
|
||||
const type = _toString.call(state.dump);
|
||||
if (block) {
|
||||
block = state.flowLevel < 0 || state.flowLevel > level;
|
||||
}
|
||||
const objectOrArray = type === "[object Object]" || type === "[object Array]";
|
||||
let duplicateIndex = -1;
|
||||
let duplicate = false;
|
||||
if (objectOrArray) {
|
||||
duplicateIndex = state.duplicates.indexOf(object);
|
||||
duplicate = duplicateIndex !== -1;
|
||||
}
|
||||
if ((state.tag !== null && state.tag !== "?") ||
|
||||
duplicate ||
|
||||
(state.indent !== 2 && level > 0)) {
|
||||
compact = false;
|
||||
}
|
||||
if (duplicate && state.usedDuplicates[duplicateIndex]) {
|
||||
state.dump = `*ref_${duplicateIndex}`;
|
||||
}
|
||||
else {
|
||||
if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) {
|
||||
state.usedDuplicates[duplicateIndex] = true;
|
||||
}
|
||||
if (type === "[object Object]") {
|
||||
if (block && Object.keys(state.dump).length !== 0) {
|
||||
writeBlockMapping(state, level, state.dump, compact);
|
||||
if (duplicate) {
|
||||
state.dump = `&ref_${duplicateIndex}${state.dump}`;
|
||||
}
|
||||
}
|
||||
else {
|
||||
writeFlowMapping(state, level, state.dump);
|
||||
if (duplicate) {
|
||||
state.dump = `&ref_${duplicateIndex} ${state.dump}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (type === "[object Array]") {
|
||||
const arrayLevel = state.noArrayIndent && level > 0 ? level - 1 : level;
|
||||
if (block && state.dump.length !== 0) {
|
||||
writeBlockSequence(state, arrayLevel, state.dump, compact);
|
||||
if (duplicate) {
|
||||
state.dump = `&ref_${duplicateIndex}${state.dump}`;
|
||||
}
|
||||
}
|
||||
else {
|
||||
writeFlowSequence(state, arrayLevel, state.dump);
|
||||
if (duplicate) {
|
||||
state.dump = `&ref_${duplicateIndex} ${state.dump}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (type === "[object String]") {
|
||||
if (state.tag !== "?") {
|
||||
writeScalar(state, state.dump, level, iskey);
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (state.skipInvalid)
|
||||
return false;
|
||||
throw new YAMLError(`unacceptable kind of an object to dump ${type}`);
|
||||
}
|
||||
if (state.tag !== null && state.tag !== "?") {
|
||||
state.dump = `!<${state.tag}> ${state.dump}`;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function inspectNode(object, objects, duplicatesIndexes) {
|
||||
if (object !== null && typeof object === "object") {
|
||||
const index = objects.indexOf(object);
|
||||
if (index !== -1) {
|
||||
if (duplicatesIndexes.indexOf(index) === -1) {
|
||||
duplicatesIndexes.push(index);
|
||||
}
|
||||
}
|
||||
else {
|
||||
objects.push(object);
|
||||
if (Array.isArray(object)) {
|
||||
for (let idx = 0, length = object.length; idx < length; idx += 1) {
|
||||
inspectNode(object[idx], objects, duplicatesIndexes);
|
||||
}
|
||||
}
|
||||
else {
|
||||
const objectKeyList = Object.keys(object);
|
||||
for (let idx = 0, length = objectKeyList.length; idx < length; idx += 1) {
|
||||
inspectNode(object[objectKeyList[idx]], objects, duplicatesIndexes);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function getDuplicateReferences(object, state) {
|
||||
const objects = [], duplicatesIndexes = [];
|
||||
inspectNode(object, objects, duplicatesIndexes);
|
||||
const length = duplicatesIndexes.length;
|
||||
for (let index = 0; index < length; index += 1) {
|
||||
state.duplicates.push(objects[duplicatesIndexes[index]]);
|
||||
}
|
||||
state.usedDuplicates = Array.from({ length });
|
||||
}
|
||||
export function dump(input, options) {
|
||||
options = options || {};
|
||||
const state = new DumperState(options);
|
||||
if (!state.noRefs)
|
||||
getDuplicateReferences(input, state);
|
||||
if (writeNode(state, 0, input, true, true))
|
||||
return `${state.dump}\n`;
|
||||
return "";
|
||||
}
|
||||
@@ -1,148 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { State } from "../state.js";
|
||||
const { hasOwn } = Object;
|
||||
function compileStyleMap(schema, map) {
|
||||
if (typeof map === "undefined" || map === null)
|
||||
return {};
|
||||
let type;
|
||||
const result = {};
|
||||
const keys = Object.keys(map);
|
||||
let tag, style;
|
||||
for (let index = 0, length = keys.length; index < length; index += 1) {
|
||||
tag = keys[index];
|
||||
style = String(map[tag]);
|
||||
if (tag.slice(0, 2) === "!!") {
|
||||
tag = `tag:yaml.org,2002:${tag.slice(2)}`;
|
||||
}
|
||||
type = schema.compiledTypeMap.fallback[tag];
|
||||
if (type &&
|
||||
typeof type.styleAliases !== "undefined" &&
|
||||
hasOwn(type.styleAliases, style)) {
|
||||
style = type.styleAliases[style];
|
||||
}
|
||||
result[tag] = style;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
export class DumperState extends State {
|
||||
constructor({ schema, indent = 2, noArrayIndent = false, skipInvalid = false, flowLevel = -1, styles = null, sortKeys = false, lineWidth = 80, noRefs = false, noCompatMode = false, condenseFlow = false, }) {
|
||||
super(schema);
|
||||
Object.defineProperty(this, "indent", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "noArrayIndent", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "skipInvalid", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "flowLevel", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "sortKeys", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "lineWidth", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "noRefs", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "noCompatMode", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "condenseFlow", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "implicitTypes", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "explicitTypes", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "tag", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: null
|
||||
});
|
||||
Object.defineProperty(this, "result", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: ""
|
||||
});
|
||||
Object.defineProperty(this, "duplicates", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: []
|
||||
});
|
||||
Object.defineProperty(this, "usedDuplicates", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: []
|
||||
}); // changed from null to []
|
||||
Object.defineProperty(this, "styleMap", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "dump", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
this.indent = Math.max(1, indent);
|
||||
this.noArrayIndent = noArrayIndent;
|
||||
this.skipInvalid = skipInvalid;
|
||||
this.flowLevel = flowLevel;
|
||||
this.styleMap = compileStyleMap(this.schema, styles);
|
||||
this.sortKeys = sortKeys;
|
||||
this.lineWidth = lineWidth;
|
||||
this.noRefs = noRefs;
|
||||
this.noCompatMode = noCompatMode;
|
||||
this.condenseFlow = condenseFlow;
|
||||
this.implicitTypes = this.schema.compiledImplicit;
|
||||
this.explicitTypes = this.schema.compiledExplicit;
|
||||
}
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
export class YAMLError extends Error {
|
||||
constructor(message = "(unknown reason)", mark = "") {
|
||||
super(`${message} ${mark}`);
|
||||
Object.defineProperty(this, "mark", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: mark
|
||||
});
|
||||
this.name = this.constructor.name;
|
||||
}
|
||||
toString(_compact) {
|
||||
return `${this.name}: ${this.message} ${this.mark}`;
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,150 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { State } from "../state.js";
|
||||
export class LoaderState extends State {
|
||||
constructor(input, { filename, schema, onWarning, legacy = false, json = false, listener = null, }) {
|
||||
super(schema);
|
||||
Object.defineProperty(this, "input", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: input
|
||||
});
|
||||
Object.defineProperty(this, "documents", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: []
|
||||
});
|
||||
Object.defineProperty(this, "length", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "lineIndent", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: 0
|
||||
});
|
||||
Object.defineProperty(this, "lineStart", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: 0
|
||||
});
|
||||
Object.defineProperty(this, "position", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: 0
|
||||
});
|
||||
Object.defineProperty(this, "line", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: 0
|
||||
});
|
||||
Object.defineProperty(this, "filename", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "onWarning", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "legacy", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "json", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "listener", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "implicitTypes", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "typeMap", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "version", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "checkLineBreaks", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "tagMap", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "anchorMap", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "tag", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "anchor", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "kind", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "result", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: ""
|
||||
});
|
||||
this.filename = filename;
|
||||
this.onWarning = onWarning;
|
||||
this.legacy = legacy;
|
||||
this.json = json;
|
||||
this.listener = listener;
|
||||
this.implicitTypes = this.schema.compiledImplicit;
|
||||
this.typeMap = this.schema.compiledTypeMap;
|
||||
this.length = input.length;
|
||||
}
|
||||
}
|
||||
@@ -1,81 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { repeat } from "./utils.js";
|
||||
// Position marker used in error reporting (ported from js-yaml v3.13.1).
export class Mark {
  constructor(name, buffer, position, line, column) {
    // Mirror compiled-class-field semantics for every field in order.
    const fields = { name, buffer, position, line, column };
    for (const [key, value] of Object.entries(fields)) {
      Object.defineProperty(this, key, {
        enumerable: true,
        configurable: true,
        writable: true,
        value,
      });
    }
  }
  /**
   * Renders the source line around `position` with a caret under the
   * offending column. Long lines are clipped with " ... " on either side.
   * Returns null when no buffer is available.
   */
  getSnippet(indent = 4, maxLength = 75) {
    if (!this.buffer) return null;
    let head = "";
    let start = this.position;
    // Walk left to the start of the line (or a clip point).
    while (start > 0 && !"\x00\r\n\x85\u2028\u2029".includes(this.buffer.charAt(start - 1))) {
      start -= 1;
      if (this.position - start > maxLength / 2 - 1) {
        head = " ... ";
        start += 5;
        break;
      }
    }
    let tail = "";
    let end = this.position;
    // Walk right to the end of the line (or a clip point).
    while (end < this.buffer.length && !"\x00\r\n\x85\u2028\u2029".includes(this.buffer.charAt(end))) {
      end += 1;
      if (end - this.position > maxLength / 2 - 1) {
        tail = " ... ";
        end -= 5;
        break;
      }
    }
    const snippet = this.buffer.slice(start, end);
    const pad = " ".repeat(indent);
    const caretPad = " ".repeat(indent + this.position - start + head.length);
    return `${pad}${head}${snippet}${tail}\n${caretPad}^`;
  }
  /**
   * Human-readable location ("in <name> at line L, column C"); appends the
   * snippet unless `compact` is truthy.
   */
  toString(compact) {
    let where = this.name ? `in "${this.name}" ` : "";
    where += `at line ${this.line + 1}, column ${this.column + 1}`;
    if (!compact) {
      const snippet = this.getSnippet();
      if (snippet) {
        where += `:\n${snippet}`;
      }
    }
    return where;
  }
}
|
||||
@@ -1,17 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { load, loadAll } from "./loader/loader.js";
|
||||
/**
 * Parses `content` as a single YAML document.
 *
 * Returns a JavaScript object or throws `YAMLException` on error.
 * By default, does not support regexps, functions and undefined. This method is safe for untrusted data.
 */
export function parse(content, options) {
  const document = load(content, options);
  return document;
}
|
||||
/**
 * Parses `content` as a stream of YAML documents, delegating to `loadAll`;
 * `iterator`, when provided, is invoked per document.
 */
export function parseAll(content, iterator, options) {
  const documents = loadAll(content, iterator, options);
  return documents;
}
|
||||
@@ -1,102 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { YAMLError } from "./error.js";
|
||||
// Flattens the `name` ("implicit"/"explicit") type lists of `schema` and all
// of its included schemas into one array. A later type with the same tag+kind
// shadows (excludes) any earlier occurrence.
function compileList(schema, name, result) {
  let merged = result;
  for (const includedSchema of schema.include) {
    merged = compileList(includedSchema, name, merged);
  }
  const shadowed = new Set();
  for (const currentType of schema[name]) {
    merged.forEach((previousType, previousIndex) => {
      if (
        previousType.tag === currentType.tag &&
        previousType.kind === currentType.kind
      ) {
        shadowed.add(previousIndex);
      }
    });
    merged.push(currentType);
  }
  return merged.filter((_type, index) => !shadowed.has(index));
}
|
||||
// Indexes the given type arrays by kind and tag; every type is also recorded
// under the "fallback" bucket regardless of kind.
function compileMap(...typesList) {
  const result = {
    fallback: {},
    mapping: {},
    scalar: {},
    sequence: {},
  };
  for (const types of typesList) {
    for (const type of types) {
      if (type.kind === null) continue;
      result[type.kind][type.tag] = type;
      result.fallback[type.tag] = type;
    }
  }
  return result;
}
|
||||
// A YAML schema: a set of implicit/explicit types plus included sub-schemas,
// compiled into flat lists and a tag-indexed map for the loader/dumper.
export class Schema {
  constructor(definition) {
    // Mirror compiled-class-field semantics for every declared property.
    const declared = [
      "implicit",
      "explicit",
      "include",
      "compiledImplicit",
      "compiledExplicit",
      "compiledTypeMap",
    ];
    for (const key of declared) {
      Object.defineProperty(this, key, {
        enumerable: true,
        configurable: true,
        writable: true,
        value: void 0,
      });
    }
    this.explicit = definition.explicit || [];
    this.implicit = definition.implicit || [];
    this.include = definition.include || [];
    // Implicit resolution only works on scalars; reject anything else early.
    for (const type of this.implicit) {
      if (type.loadKind && type.loadKind !== "scalar") {
        throw new YAMLError("There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.");
      }
    }
    this.compiledImplicit = compileList(this, "implicit", []);
    this.compiledExplicit = compileList(this, "explicit", []);
    this.compiledTypeMap = compileMap(this.compiledImplicit, this.compiledExplicit);
  }
  /* Returns a new extended schema from current schema */
  extend(definition) {
    // Union of ours + theirs, de-duplicated, order preserved.
    const union = (ours, theirs) => [...new Set([...ours, ...(theirs ?? [])])];
    return new Schema({
      implicit: union(this.implicit, definition?.implicit),
      explicit: union(this.explicit, definition?.explicit),
      include: union(this.include, definition?.include),
    });
  }
  // Vestigial no-op kept for API compatibility with the original port.
  static create() {}
}
|
||||
@@ -1,11 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Schema } from "../schema.js";
|
||||
import { json } from "./json.js";
|
||||
// Standard YAML's Core schema.
// http://www.yaml.org/spec/1.2/spec.html#id2804923
// Builds on the JSON schema via `include` without adding further types.
export const core = new Schema({
  include: [json],
});
|
||||
@@ -1,14 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Schema } from "../schema.js";
|
||||
import { binary, merge, omap, pairs, set, timestamp } from "../type/mod.js";
|
||||
import { core } from "./core.js";
|
||||
// JS-YAML's default schema for `safeLoad` function.
// It is not described in the YAML specification.
// Extends the core schema with explicit binary/omap/pairs/set types and
// implicit timestamp/merge resolution.
export const def = new Schema({
  explicit: [binary, omap, pairs, set],
  implicit: [timestamp, merge],
  include: [core],
});
|
||||
@@ -1,10 +0,0 @@
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Schema } from "../schema.js";
|
||||
import { regexp, undefinedType } from "../type/mod.js";
|
||||
import { def } from "./default.js";
|
||||
// Extends JS-YAML default schema with additional JavaScript types
// It is not described in the YAML specification.
// Adds regexp and undefined handling on top of the default schema.
export const extended = new Schema({
  explicit: [regexp, undefinedType],
  include: [def],
});
|
||||
@@ -1,11 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Schema } from "../schema.js";
|
||||
import { map, seq, str } from "../type/mod.js";
|
||||
// Standard YAML's Failsafe schema.
// http://www.yaml.org/spec/1.2/spec.html#id2802346
// The minimal schema: only plain strings, sequences and mappings.
export const failsafe = new Schema({
  explicit: [str, seq, map],
});
|
||||
@@ -1,13 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Schema } from "../schema.js";
|
||||
import { bool, float, int, nil } from "../type/mod.js";
|
||||
import { failsafe } from "./failsafe.js";
|
||||
// Standard YAML's JSON schema.
// http://www.yaml.org/spec/1.2/spec.html#id2803231
// Adds implicit null/bool/int/float resolution on top of the failsafe schema.
export const json = new Schema({
  implicit: [nil, bool, int, float],
  include: [failsafe],
});
|
||||
@@ -1,9 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
export { core as CORE_SCHEMA } from "./core.js";
|
||||
export { def as DEFAULT_SCHEMA } from "./default.js";
|
||||
export { extended as EXTENDED_SCHEMA } from "./extended.js";
|
||||
export { failsafe as FAILSAFE_SCHEMA } from "./failsafe.js";
|
||||
export { json as JSON_SCHEMA } from "./json.js";
|
||||
@@ -1,15 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { DEFAULT_SCHEMA } from "./schema/mod.js";
|
||||
// Shared base for loader/dumper state objects: holds the active schema.
export class State {
  /**
   * @param schema Active schema; defaults to DEFAULT_SCHEMA when omitted.
   */
  constructor(schema = DEFAULT_SCHEMA) {
    Object.defineProperty(this, "schema", {
      configurable: true,
      enumerable: true,
      value: schema,
      writable: true,
    });
  }
}
|
||||
@@ -1,13 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { dump } from "./dumper/dumper.js";
|
||||
/**
 * Serializes `object` as a YAML document.
 *
 * You can disable exceptions by setting the skipInvalid option to true.
 */
export function stringify(obj, options) {
  const text = dump(obj, options);
  return text;
}
|
||||
@@ -1,84 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
const DEFAULT_RESOLVE = () => true;
const DEFAULT_CONSTRUCT = (data) => data;
// Placeholder: tag format validation was never implemented in this port;
// the tag is returned unchanged.
function checkTagFormat(tag) {
  return tag;
}
// Describes one YAML tag: how to recognize (resolve), build (construct) and
// dump (represent) values of that tag.
export class Type {
  constructor(tag, options) {
    // Default values for every field, in the original declaration order.
    const fields = {
      tag: void 0,
      kind: null,
      instanceOf: void 0,
      predicate: void 0,
      represent: void 0,
      defaultStyle: void 0,
      styleAliases: void 0,
      loadKind: void 0,
      resolve: () => true,
      construct: (data) => data,
    };
    for (const [key, value] of Object.entries(fields)) {
      Object.defineProperty(this, key, {
        enumerable: true,
        configurable: true,
        writable: true,
        value,
      });
    }
    this.tag = checkTagFormat(tag);
    if (options) {
      this.kind = options.kind;
      this.resolve = options.resolve || DEFAULT_RESOLVE;
      this.construct = options.construct || DEFAULT_CONSTRUCT;
      this.instanceOf = options.instanceOf;
      this.predicate = options.predicate;
      this.represent = options.represent;
      this.defaultStyle = options.defaultStyle;
      this.styleAliases = options.styleAliases;
    }
  }
}
|
||||
@@ -1,119 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
import { Buffer } from "../../../io/buffer.js";
|
||||
// [ 64, 65, 66 ] -> [ padding, CR, LF ]
const BASE64_MAP = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r";
// Returns true when `data` looks like well-formed base64; CR/LF are allowed
// anywhere and '=' padding counts toward the bit total.
function resolveYamlBinary(data) {
  if (data === null) return false;
  let bitlen = 0;
  for (let idx = 0; idx < data.length; idx++) {
    const code = BASE64_MAP.indexOf(data.charAt(idx));
    // Skip CR/LF (indices above the padding slot).
    if (code > 64) continue;
    // Fail on characters outside the base64 alphabet.
    if (code < 0) return false;
    bitlen += 6;
  }
  // If there are any bits left over, the source was corrupted.
  return bitlen % 8 === 0;
}
|
||||
// Decodes a base64 scalar (pre-validated by resolveYamlBinary) into a Buffer
// of raw bytes. NOTE(review): the bit packing below is order-sensitive; the
// code is kept byte-identical to the port.
function constructYamlBinary(data) {
  // remove CR/LF & padding to simplify scan
  const input = data.replace(/[\r\n=]/g, "");
  const max = input.length;
  const map = BASE64_MAP;
  // Collect by 6*4 bits (3 bytes)
  const result = [];
  let bits = 0;
  for (let idx = 0; idx < max; idx++) {
    // Every full group of 4 sextets (24 bits) flushes 3 bytes.
    if (idx % 4 === 0 && idx) {
      result.push((bits >> 16) & 0xff);
      result.push((bits >> 8) & 0xff);
      result.push(bits & 0xff);
    }
    bits = (bits << 6) | map.indexOf(input.charAt(idx));
  }
  // Dump tail
  // tailbits = number of data bits in the final (possibly partial) group.
  const tailbits = (max % 4) * 6;
  if (tailbits === 0) {
    result.push((bits >> 16) & 0xff);
    result.push((bits >> 8) & 0xff);
    result.push(bits & 0xff);
  } else if (tailbits === 18) {
    // 3 leftover sextets -> 2 bytes.
    result.push((bits >> 10) & 0xff);
    result.push((bits >> 2) & 0xff);
  } else if (tailbits === 12) {
    // 2 leftover sextets -> 1 byte.
    result.push((bits >> 4) & 0xff);
  }
  return new Buffer(new Uint8Array(result));
}
|
||||
// Encodes a byte array as base64 text with '=' padding (map[64]) for dumping.
// NOTE(review): the bit packing below is order-sensitive; the code is kept
// byte-identical to the port.
function representYamlBinary(object) {
  const max = object.length;
  const map = BASE64_MAP;
  // Convert every three bytes to 4 ASCII characters.
  let result = "";
  let bits = 0;
  for (let idx = 0; idx < max; idx++) {
    // Every full group of 3 bytes (24 bits) flushes 4 sextet characters.
    if (idx % 3 === 0 && idx) {
      result += map[(bits >> 18) & 0x3f];
      result += map[(bits >> 12) & 0x3f];
      result += map[(bits >> 6) & 0x3f];
      result += map[bits & 0x3f];
    }
    bits = (bits << 8) + object[idx];
  }
  // Dump tail
  const tail = max % 3;
  if (tail === 0) {
    result += map[(bits >> 18) & 0x3f];
    result += map[(bits >> 12) & 0x3f];
    result += map[(bits >> 6) & 0x3f];
    result += map[bits & 0x3f];
  } else if (tail === 2) {
    // Two leftover bytes -> three sextets + one '=' padding char.
    result += map[(bits >> 10) & 0x3f];
    result += map[(bits >> 4) & 0x3f];
    result += map[(bits << 2) & 0x3f];
    result += map[64];
  } else if (tail === 1) {
    // One leftover byte -> two sextets + two '=' padding chars.
    result += map[(bits >> 2) & 0x3f];
    result += map[(bits << 4) & 0x3f];
    result += map[64];
    result += map[64];
  }
  return result;
}
|
||||
// Predicate for the !!binary type: probes `obj` with Buffer.readFromSync;
// a negative read count signals binary content. Any throw means "not binary".
function isBinary(obj) {
  const scratch = new Buffer();
  try {
    return scratch.readFromSync(obj) < 0;
  } catch {
    return false;
  } finally {
    scratch.reset();
  }
}
|
||||
// !!binary — base64-encoded byte buffers.
export const binary = new Type("tag:yaml.org,2002:binary", {
  construct: constructYamlBinary,
  kind: "scalar",
  predicate: isBinary,
  represent: representYamlBinary,
  resolve: resolveYamlBinary,
});
|
||||
@@ -1,32 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
import { isBoolean } from "../utils.js";
|
||||
// Accepts exactly the six spellings of YAML booleans (lower/Upper/CAPS).
function resolveYamlBoolean(data) {
  return ["true", "True", "TRUE", "false", "False", "FALSE"].includes(data);
}
|
||||
// A scalar is truthy only when it is one of the "true" spellings.
function constructYamlBoolean(data) {
  return ["true", "True", "TRUE"].includes(data);
}
|
||||
// !!bool with lowercase/uppercase/camelcase dump styles.
export const bool = new Type("tag:yaml.org,2002:bool", {
  construct: constructYamlBoolean,
  defaultStyle: "lowercase",
  kind: "scalar",
  predicate: isBoolean,
  represent: {
    lowercase(object) {
      return object ? "true" : "false";
    },
    uppercase(object) {
      return object ? "TRUE" : "FALSE";
    },
    camelcase(object) {
      return object ? "True" : "False";
    },
  },
  resolve: resolveYamlBoolean,
});
|
||||
@@ -1,106 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
import { isNegativeZero } from "../utils.js";
|
||||
const YAML_FLOAT_PATTERN = new RegExp(
  // 2.5e4, 2.5 and integers
  "^(?:[-+]?(?:0|[1-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?" +
    // .2e4, .2
    // special case, seems not from spec
    "|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?" +
    // 20:59
    "|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*" +
    // .inf
    "|[-+]?\\.(?:inf|Inf|INF)" +
    // .nan
    "|\\.(?:nan|NaN|NAN))$");
// Accepts YAML float spellings (incl. base-60, .inf, .nan) but rejects a
// trailing underscore, which the regex alone would let through.
function resolveYamlFloat(data) {
  if (!YAML_FLOAT_PATTERN.test(data)) {
    return false;
  }
  return data[data.length - 1] !== "_";
}
|
||||
// Converts a resolved float scalar to a number, handling underscores, sign,
// .inf/.nan and base-60 ("1:30" == 90) forms.
function constructYamlFloat(data) {
  let text = data.replace(/_/g, "").toLowerCase();
  const sign = text[0] === "-" ? -1 : 1;
  if (text[0] === "+" || text[0] === "-") {
    text = text.slice(1);
  }
  if (text === ".inf") {
    return sign === 1 ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY;
  }
  if (text === ".nan") {
    return NaN;
  }
  if (text.includes(":")) {
    // Sexagesimal: fold segments from least significant, scaling by 60.
    let total = 0.0;
    let base = 1;
    for (const segment of text.split(":").reverse()) {
      total += parseFloat(segment) * base;
      base *= 60;
    }
    return sign * total;
  }
  return sign * parseFloat(text);
}
|
||||
const SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/;
// Dump styles for the special float values; unknown styles fall through to
// the generic number formatting below.
const NAN_STYLES = { lowercase: ".nan", uppercase: ".NAN", camelcase: ".NaN" };
const INF_STYLES = { lowercase: ".inf", uppercase: ".INF", camelcase: ".Inf" };
function representYamlFloat(object, style) {
  if (isNaN(object)) {
    const text = NAN_STYLES[style];
    if (text !== undefined) return text;
  } else if (object === Number.POSITIVE_INFINITY) {
    const text = INF_STYLES[style];
    if (text !== undefined) return text;
  } else if (object === Number.NEGATIVE_INFINITY) {
    const text = INF_STYLES[style];
    if (text !== undefined) return `-${text}`;
  } else if (isNegativeZero(object)) {
    return "-0.0";
  }
  const res = object.toString(10);
  // JS stringifier can build scientific format without dots: 5e-100,
  // while YAML requires dot: 5.e-100. Fix it with simple hack
  return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace("e", ".e") : res;
}
|
||||
// A float is a Number with a fractional part, or negative zero (which would
// otherwise be claimed by the int type).
function isFloat(object) {
  if (Object.prototype.toString.call(object) !== "[object Number]") {
    return false;
  }
  return object % 1 !== 0 || isNegativeZero(object);
}
|
||||
// !!float with lowercase/uppercase/camelcase special-value dump styles.
export const float = new Type("tag:yaml.org,2002:float", {
  construct: constructYamlFloat,
  defaultStyle: "lowercase",
  kind: "scalar",
  predicate: isFloat,
  represent: representYamlFloat,
  resolve: resolveYamlFloat,
});
|
||||
@@ -1,38 +0,0 @@
|
||||
// Ported and adapted from js-yaml-js-types v1.0.0:
|
||||
// https://github.com/nodeca/js-yaml-js-types/tree/ac537e7bbdd3c2cbbd9882ca3919c520c2dc022b
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
// Note: the original implementation used Esprima to handle functions.
// To avoid dependencies we simply attempt to evaluate the string as a
// function expression.
// SECURITY NOTE(review): `new Function` executes arbitrary code from the
// YAML document; only enable this type for trusted input.
function reconstructFunction(code) {
  const candidate = new Function(`return ${code}`)();
  if (candidate instanceof Function) {
    return candidate;
  }
  throw new TypeError(`Expected function but got ${typeof candidate}: ${code}`);
}
|
||||
// !!js/function — constructs real Function objects from YAML scalars.
// SECURITY NOTE(review): resolve/construct evaluate document-supplied code
// via `new Function` (see reconstructFunction); never use on untrusted input.
export const func = new Type("tag:yaml.org,2002:js/function", {
  kind: "scalar",
  resolve(data) {
    if (data === null) {
      return false;
    }
    try {
      // Resolution requires a full evaluation attempt of the scalar.
      reconstructFunction(`${data}`);
      return true;
    } catch (_err) {
      return false;
    }
  },
  construct(data) {
    return reconstructFunction(data);
  },
  predicate(object) {
    return object instanceof Function;
  },
  represent(object) {
    return object.toString();
  },
});
|
||||
@@ -1,168 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
import { isNegativeZero } from "../utils.js";
|
||||
// Character-code classifiers for the integer scanner below.
function isHexCode(c) {
  return (
    (c >= 0x30 && c <= 0x39) || // 0-9
    (c >= 0x41 && c <= 0x46) || // A-F
    (c >= 0x61 && c <= 0x66)    // a-f
  );
}
function isOctCode(c) {
  return c >= 0x30 && c <= 0x37; // 0-7
}
function isDecCode(c) {
  return c >= 0x30 && c <= 0x39; // 0-9
}
// Recognizes YAML integers: optional sign, then binary (0b), hex (0x),
// octal (leading 0), decimal, or base-60 ("1:30"); '_' separators are
// allowed inside but not at either end of the digit run.
function resolveYamlInteger(data) {
  const len = data.length;
  let pos = 0;
  let sawDigit = false;
  if (!len) return false;
  let ch = data[pos];
  // Optional sign.
  if (ch === "-" || ch === "+") {
    ch = data[++pos];
  }
  if (ch === "0") {
    // Bare "0" (possibly signed).
    if (pos + 1 === len) return true;
    ch = data[++pos];
    if (ch === "b") {
      // Binary digits.
      pos++;
      for (; pos < len; pos++) {
        ch = data[pos];
        if (ch === "_") continue;
        if (ch !== "0" && ch !== "1") return false;
        sawDigit = true;
      }
      return sawDigit && ch !== "_";
    }
    if (ch === "x") {
      // Hex digits.
      pos++;
      for (; pos < len; pos++) {
        ch = data[pos];
        if (ch === "_") continue;
        if (!isHexCode(data.charCodeAt(pos))) return false;
        sawDigit = true;
      }
      return sawDigit && ch !== "_";
    }
    // Leading zero with no radix letter: octal digits.
    for (; pos < len; pos++) {
      ch = data[pos];
      if (ch === "_") continue;
      if (!isOctCode(data.charCodeAt(pos))) return false;
      sawDigit = true;
    }
    return sawDigit && ch !== "_";
  }
  // base 10 (except 0) or base 60; value may not start with '_'.
  if (ch === "_") return false;
  for (; pos < len; pos++) {
    ch = data[pos];
    if (ch === "_") continue;
    if (ch === ":") break;
    if (!isDecCode(data.charCodeAt(pos))) {
      return false;
    }
    sawDigit = true;
  }
  // Must have digits and must not end with '_'.
  if (!sawDigit || ch === "_") return false;
  // Plain base-10 when no ':' was seen.
  if (ch !== ":") return true;
  // base60 is rarely used; validate the remaining segments with a regex.
  return /^(:[0-5]?[0-9])+$/.test(data.slice(pos));
}
|
||||
// Converts a resolved integer scalar to a number, handling '_' separators,
// sign, 0b/0x/0-prefixed radixes and base-60 ("1:30" == 90).
function constructYamlInteger(data) {
  let text = data;
  if (text.includes("_")) {
    text = text.replace(/_/g, "");
  }
  let sign = 1;
  let first = text[0];
  if (first === "+" || first === "-") {
    if (first === "-") sign = -1;
    text = text.slice(1);
    first = text[0];
  }
  if (text === "0") return 0;
  if (first === "0") {
    if (text[1] === "b") return sign * parseInt(text.slice(2), 2);
    if (text[1] === "x") return sign * parseInt(text, 16);
    // Leading zero with no radix letter: octal.
    return sign * parseInt(text, 8);
  }
  if (text.includes(":")) {
    // Sexagesimal: fold segments from least significant, scaling by 60.
    let total = 0;
    let base = 1;
    for (const segment of text.split(":").reverse()) {
      total += parseInt(segment, 10) * base;
      base *= 60;
    }
    return sign * total;
  }
  return sign * parseInt(text, 10);
}
|
||||
// An integer is a Number with no fractional part, excluding negative zero
// (which the float type claims).
function isInteger(object) {
  if (Object.prototype.toString.call(object) !== "[object Number]") {
    return false;
  }
  return object % 1 === 0 && !isNegativeZero(object);
}
|
||||
// !!int with binary/octal/decimal/hexadecimal dump styles.
export const int = new Type("tag:yaml.org,2002:int", {
  construct: constructYamlInteger,
  defaultStyle: "decimal",
  kind: "scalar",
  predicate: isInteger,
  represent: {
    // Negative values: toString includes the '-', so it is sliced off the
    // digits and re-prefixed ahead of the radix marker.
    binary(obj) {
      return obj >= 0
        ? `0b${obj.toString(2)}`
        : `-0b${obj.toString(2).slice(1)}`;
    },
    octal(obj) {
      return obj >= 0 ? `0${obj.toString(8)}` : `-0${obj.toString(8).slice(1)}`;
    },
    decimal(obj) {
      return obj.toString(10);
    },
    hexadecimal(obj) {
      return obj >= 0
        ? `0x${obj.toString(16).toUpperCase()}`
        : `-0x${obj.toString(16).toUpperCase().slice(1)}`;
    },
  },
  resolve: resolveYamlInteger,
  styleAliases: {
    binary: [2, "bin"],
    decimal: [10, "dec"],
    hexadecimal: [16, "hex"],
    octal: [8, "oct"],
  },
});
|
||||
@@ -1,11 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
// !!map resolves to a plain JS object; a null (empty) node becomes {}.
export const map = new Type("tag:yaml.org,2002:map", {
  construct(data) {
    return data !== null ? data : {};
  },
  kind: "mapping",
});
|
||||
@@ -1,12 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
// The merge key is spelled "<<"; an empty (null) scalar also resolves.
function resolveYamlMerge(data) {
  if (data === null) return true;
  return data === "<<";
}
|
||||
// !!merge — the "<<" merge-key marker for mappings.
export const merge = new Type("tag:yaml.org,2002:merge", {
  kind: "scalar",
  resolve: resolveYamlMerge,
});
|
||||
@@ -1,20 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
export { binary } from "./binary.js";
|
||||
export { bool } from "./bool.js";
|
||||
export { float } from "./float.js";
|
||||
export { func } from "./function.js";
|
||||
export { int } from "./int.js";
|
||||
export { map } from "./map.js";
|
||||
export { merge } from "./merge.js";
|
||||
export { nil } from "./nil.js";
|
||||
export { omap } from "./omap.js";
|
||||
export { pairs } from "./pairs.js";
|
||||
export { regexp } from "./regexp.js";
|
||||
export { seq } from "./seq.js";
|
||||
export { set } from "./set.js";
|
||||
export { str } from "./str.js";
|
||||
export { timestamp } from "./timestamp.js";
|
||||
export { undefinedType } from "./undefined.js";
|
||||
@@ -1,37 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
// Accepts exactly the four null spellings: "~", "null", "Null", "NULL".
function resolveYamlNull(data) {
  return ["~", "null", "Null", "NULL"].includes(data);
}
|
||||
function constructYamlNull() {
|
||||
return null;
|
||||
}
|
||||
function isNull(object) {
|
||||
return object === null;
|
||||
}
|
||||
export const nil = new Type("tag:yaml.org,2002:null", {
|
||||
construct: constructYamlNull,
|
||||
defaultStyle: "lowercase",
|
||||
kind: "scalar",
|
||||
predicate: isNull,
|
||||
represent: {
|
||||
canonical() {
|
||||
return "~";
|
||||
},
|
||||
lowercase() {
|
||||
return "null";
|
||||
},
|
||||
uppercase() {
|
||||
return "NULL";
|
||||
},
|
||||
camelcase() {
|
||||
return "Null";
|
||||
},
|
||||
},
|
||||
resolve: resolveYamlNull,
|
||||
});
|
||||
@@ -1,40 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
const { hasOwn } = Object;
|
||||
const _toString = Object.prototype.toString;
|
||||
function resolveYamlOmap(data) {
|
||||
const objectKeys = [];
|
||||
let pairKey = "";
|
||||
let pairHasKey = false;
|
||||
for (const pair of data) {
|
||||
pairHasKey = false;
|
||||
if (_toString.call(pair) !== "[object Object]")
|
||||
return false;
|
||||
for (pairKey in pair) {
|
||||
if (hasOwn(pair, pairKey)) {
|
||||
if (!pairHasKey)
|
||||
pairHasKey = true;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (!pairHasKey)
|
||||
return false;
|
||||
if (objectKeys.indexOf(pairKey) === -1)
|
||||
objectKeys.push(pairKey);
|
||||
else
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function constructYamlOmap(data) {
|
||||
return data !== null ? data : [];
|
||||
}
|
||||
export const omap = new Type("tag:yaml.org,2002:omap", {
|
||||
construct: constructYamlOmap,
|
||||
kind: "sequence",
|
||||
resolve: resolveYamlOmap,
|
||||
});
|
||||
@@ -1,35 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
const _toString = Object.prototype.toString;
|
||||
function resolveYamlPairs(data) {
|
||||
const result = Array.from({ length: data.length });
|
||||
for (let index = 0; index < data.length; index++) {
|
||||
const pair = data[index];
|
||||
if (_toString.call(pair) !== "[object Object]")
|
||||
return false;
|
||||
const keys = Object.keys(pair);
|
||||
if (keys.length !== 1)
|
||||
return false;
|
||||
result[index] = [keys[0], pair[keys[0]]];
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function constructYamlPairs(data) {
|
||||
if (data === null)
|
||||
return [];
|
||||
const result = Array.from({ length: data.length });
|
||||
for (let index = 0; index < data.length; index += 1) {
|
||||
const pair = data[index];
|
||||
const keys = Object.keys(pair);
|
||||
result[index] = [keys[0], pair[keys[0]]];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
export const pairs = new Type("tag:yaml.org,2002:pairs", {
|
||||
construct: constructYamlPairs,
|
||||
kind: "sequence",
|
||||
resolve: resolveYamlPairs,
|
||||
});
|
||||
@@ -1,37 +0,0 @@
|
||||
// Ported and adapted from js-yaml-js-types v1.0.0:
|
||||
// https://github.com/nodeca/js-yaml-js-types/tree/ac537e7bbdd3c2cbbd9882ca3919c520c2dc022b
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
const REGEXP = /^\/(?<regexp>[\s\S]+)\/(?<modifiers>[gismuy]*)$/;
|
||||
export const regexp = new Type("tag:yaml.org,2002:js/regexp", {
|
||||
kind: "scalar",
|
||||
resolve(data) {
|
||||
if ((data === null) || (!data.length)) {
|
||||
return false;
|
||||
}
|
||||
const regexp = `${data}`;
|
||||
if (regexp.charAt(0) === "/") {
|
||||
// Ensure regex is properly terminated
|
||||
if (!REGEXP.test(data)) {
|
||||
return false;
|
||||
}
|
||||
// Check no duplicate modifiers
|
||||
const modifiers = [...(regexp.match(REGEXP)?.groups?.modifiers ?? "")];
|
||||
if (new Set(modifiers).size < modifiers.length) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
},
|
||||
construct(data) {
|
||||
const { regexp = `${data}`, modifiers = "" } = `${data}`.match(REGEXP)?.groups ?? {};
|
||||
return new RegExp(regexp, modifiers);
|
||||
},
|
||||
predicate(object) {
|
||||
return object instanceof RegExp;
|
||||
},
|
||||
represent(object) {
|
||||
return object.toString();
|
||||
},
|
||||
});
|
||||
@@ -1,11 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
export const seq = new Type("tag:yaml.org,2002:seq", {
|
||||
construct(data) {
|
||||
return data !== null ? data : [];
|
||||
},
|
||||
kind: "sequence",
|
||||
});
|
||||
@@ -1,25 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
const { hasOwn } = Object;
|
||||
function resolveYamlSet(data) {
|
||||
if (data === null)
|
||||
return true;
|
||||
for (const key in data) {
|
||||
if (hasOwn(data, key)) {
|
||||
if (data[key] !== null)
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function constructYamlSet(data) {
|
||||
return data !== null ? data : {};
|
||||
}
|
||||
export const set = new Type("tag:yaml.org,2002:set", {
|
||||
construct: constructYamlSet,
|
||||
kind: "mapping",
|
||||
resolve: resolveYamlSet,
|
||||
});
|
||||
@@ -1,10 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
export const str = new Type("tag:yaml.org,2002:str", {
|
||||
construct(data) {
|
||||
return data !== null ? data : "";
|
||||
},
|
||||
kind: "scalar",
|
||||
});
|
||||
@@ -1,78 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
const YAML_DATE_REGEXP = new RegExp("^([0-9][0-9][0-9][0-9])" + // [1] year
|
||||
"-([0-9][0-9])" + // [2] month
|
||||
"-([0-9][0-9])$");
|
||||
const YAML_TIMESTAMP_REGEXP = new RegExp("^([0-9][0-9][0-9][0-9])" + // [1] year
|
||||
"-([0-9][0-9]?)" + // [2] month
|
||||
"-([0-9][0-9]?)" + // [3] day
|
||||
"(?:[Tt]|[ \\t]+)" + // ...
|
||||
"([0-9][0-9]?)" + // [4] hour
|
||||
":([0-9][0-9])" + // [5] minute
|
||||
":([0-9][0-9])" + // [6] second
|
||||
"(?:\\.([0-9]*))?" + // [7] fraction
|
||||
"(?:[ \\t]*(Z|([-+])([0-9][0-9]?)" + // [8] tz [9] tz_sign [10] tz_hour
|
||||
"(?::([0-9][0-9]))?))?$");
|
||||
function resolveYamlTimestamp(data) {
|
||||
if (data === null)
|
||||
return false;
|
||||
if (YAML_DATE_REGEXP.exec(data) !== null)
|
||||
return true;
|
||||
if (YAML_TIMESTAMP_REGEXP.exec(data) !== null)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
function constructYamlTimestamp(data) {
|
||||
let match = YAML_DATE_REGEXP.exec(data);
|
||||
if (match === null)
|
||||
match = YAML_TIMESTAMP_REGEXP.exec(data);
|
||||
if (match === null)
|
||||
throw new Error("Date resolve error");
|
||||
// match: [1] year [2] month [3] day
|
||||
const year = +match[1];
|
||||
const month = +match[2] - 1; // JS month starts with 0
|
||||
const day = +match[3];
|
||||
if (!match[4]) {
|
||||
// no hour
|
||||
return new Date(Date.UTC(year, month, day));
|
||||
}
|
||||
// match: [4] hour [5] minute [6] second [7] fraction
|
||||
const hour = +match[4];
|
||||
const minute = +match[5];
|
||||
const second = +match[6];
|
||||
let fraction = 0;
|
||||
if (match[7]) {
|
||||
let partFraction = match[7].slice(0, 3);
|
||||
while (partFraction.length < 3) {
|
||||
// milli-seconds
|
||||
partFraction += "0";
|
||||
}
|
||||
fraction = +partFraction;
|
||||
}
|
||||
// match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute
|
||||
let delta = null;
|
||||
if (match[9]) {
|
||||
const tzHour = +match[10];
|
||||
const tzMinute = +(match[11] || 0);
|
||||
delta = (tzHour * 60 + tzMinute) * 60000; // delta in milli-seconds
|
||||
if (match[9] === "-")
|
||||
delta = -delta;
|
||||
}
|
||||
const date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction));
|
||||
if (delta)
|
||||
date.setTime(date.getTime() - delta);
|
||||
return date;
|
||||
}
|
||||
function representYamlTimestamp(date) {
|
||||
return date.toISOString();
|
||||
}
|
||||
export const timestamp = new Type("tag:yaml.org,2002:timestamp", {
|
||||
construct: constructYamlTimestamp,
|
||||
instanceOf: Date,
|
||||
kind: "scalar",
|
||||
represent: representYamlTimestamp,
|
||||
resolve: resolveYamlTimestamp,
|
||||
});
|
||||
@@ -1,20 +0,0 @@
|
||||
// Ported and adapted from js-yaml-js-types v1.0.0:
|
||||
// https://github.com/nodeca/js-yaml-js-types/tree/ac537e7bbdd3c2cbbd9882ca3919c520c2dc022b
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import { Type } from "../type.js";
|
||||
export const undefinedType = new Type("tag:yaml.org,2002:js/undefined", {
|
||||
kind: "scalar",
|
||||
resolve() {
|
||||
return true;
|
||||
},
|
||||
construct() {
|
||||
return undefined;
|
||||
},
|
||||
predicate(object) {
|
||||
return typeof object === "undefined";
|
||||
},
|
||||
represent() {
|
||||
return "";
|
||||
},
|
||||
});
|
||||
@@ -1,57 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
export function isNothing(subject) {
|
||||
return typeof subject === "undefined" || subject === null;
|
||||
}
|
||||
export function isArray(value) {
|
||||
return Array.isArray(value);
|
||||
}
|
||||
export function isBoolean(value) {
|
||||
return typeof value === "boolean" || value instanceof Boolean;
|
||||
}
|
||||
export function isNull(value) {
|
||||
return value === null;
|
||||
}
|
||||
export function isNumber(value) {
|
||||
return typeof value === "number" || value instanceof Number;
|
||||
}
|
||||
export function isString(value) {
|
||||
return typeof value === "string" || value instanceof String;
|
||||
}
|
||||
export function isSymbol(value) {
|
||||
return typeof value === "symbol";
|
||||
}
|
||||
export function isUndefined(value) {
|
||||
return value === undefined;
|
||||
}
|
||||
export function isObject(value) {
|
||||
return value !== null && typeof value === "object";
|
||||
}
|
||||
export function isError(e) {
|
||||
return e instanceof Error;
|
||||
}
|
||||
export function isFunction(value) {
|
||||
return typeof value === "function";
|
||||
}
|
||||
export function isRegExp(value) {
|
||||
return value instanceof RegExp;
|
||||
}
|
||||
export function toArray(sequence) {
|
||||
if (isArray(sequence))
|
||||
return sequence;
|
||||
if (isNothing(sequence))
|
||||
return [];
|
||||
return [sequence];
|
||||
}
|
||||
export function repeat(str, count) {
|
||||
let result = "";
|
||||
for (let cycle = 0; cycle < count; cycle++) {
|
||||
result += str;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
export function isNegativeZero(i) {
|
||||
return i === 0 && Number.NEGATIVE_INFINITY === 1 / i;
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
// Ported from js-yaml v3.13.1:
|
||||
// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da
|
||||
// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
export { parse, parseAll } from "./_yaml/parse.js";
|
||||
export { stringify } from "./_yaml/stringify.js";
|
||||
export { Type } from "./_yaml/type.js";
|
||||
export { CORE_SCHEMA, DEFAULT_SCHEMA, EXTENDED_SCHEMA, FAILSAFE_SCHEMA, JSON_SCHEMA, } from "./_yaml/schema/mod.js";
|
||||
@@ -1,977 +0,0 @@
|
||||
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
|
||||
if (kind === "m") throw new TypeError("Private method is not writable");
|
||||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
||||
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
|
||||
};
|
||||
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
|
||||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
||||
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
||||
};
|
||||
var _Buffer_instances, _Buffer_buf, _Buffer_off, _Buffer_tryGrowByReslice, _Buffer_reslice, _Buffer_grow, _BufReader_buf, _BufReader_rd, _BufReader_r, _BufReader_w, _BufReader_eof, _BufReader_fill, _BufReader_reset, _BufWriter_writer, _BufWriterSync_writer;
|
||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||
import * as dntShim from "../../../../_dnt.shims.js";
|
||||
import { assert } from "../_util/assert.js";
|
||||
import { BytesList } from "../bytes/bytes_list.js";
|
||||
import { concat, copy } from "../bytes/mod.js";
|
||||
// MIN_READ is the minimum ArrayBuffer size passed to a read call by
|
||||
// buffer.ReadFrom. As long as the Buffer has at least MIN_READ bytes beyond
|
||||
// what is required to hold the contents of r, readFrom() will not grow the
|
||||
// underlying buffer.
|
||||
const MIN_READ = 32 * 1024;
|
||||
const MAX_SIZE = 2 ** 32 - 2;
|
||||
/** A variable-sized buffer of bytes with `read()` and `write()` methods.
|
||||
*
|
||||
* Buffer is almost always used with some I/O like files and sockets. It allows
|
||||
* one to buffer up a download from a socket. Buffer grows and shrinks as
|
||||
* necessary.
|
||||
*
|
||||
* Buffer is NOT the same thing as Node's Buffer. Node's Buffer was created in
|
||||
* 2009 before JavaScript had the concept of ArrayBuffers. It's simply a
|
||||
* non-standard ArrayBuffer.
|
||||
*
|
||||
* ArrayBuffer is a fixed memory allocation. Buffer is implemented on top of
|
||||
* ArrayBuffer.
|
||||
*
|
||||
* Based on [Go Buffer](https://golang.org/pkg/bytes/#Buffer). */
|
||||
export class Buffer {
|
||||
constructor(ab) {
|
||||
_Buffer_instances.add(this);
|
||||
_Buffer_buf.set(this, void 0); // contents are the bytes buf[off : len(buf)]
|
||||
_Buffer_off.set(this, 0); // read at buf[off], write at buf[buf.byteLength]
|
||||
__classPrivateFieldSet(this, _Buffer_buf, ab === undefined ? new Uint8Array(0) : new Uint8Array(ab), "f");
|
||||
}
|
||||
/** Returns a slice holding the unread portion of the buffer.
|
||||
*
|
||||
* The slice is valid for use only until the next buffer modification (that
|
||||
* is, only until the next call to a method like `read()`, `write()`,
|
||||
* `reset()`, or `truncate()`). If `options.copy` is false the slice aliases the buffer content at
|
||||
* least until the next buffer modification, so immediate changes to the
|
||||
* slice will affect the result of future reads.
|
||||
* @param options Defaults to `{ copy: true }`
|
||||
*/
|
||||
bytes(options = { copy: true }) {
|
||||
if (options.copy === false)
|
||||
return __classPrivateFieldGet(this, _Buffer_buf, "f").subarray(__classPrivateFieldGet(this, _Buffer_off, "f"));
|
||||
return __classPrivateFieldGet(this, _Buffer_buf, "f").slice(__classPrivateFieldGet(this, _Buffer_off, "f"));
|
||||
}
|
||||
/** Returns whether the unread portion of the buffer is empty. */
|
||||
empty() {
|
||||
return __classPrivateFieldGet(this, _Buffer_buf, "f").byteLength <= __classPrivateFieldGet(this, _Buffer_off, "f");
|
||||
}
|
||||
/** A read only number of bytes of the unread portion of the buffer. */
|
||||
get length() {
|
||||
return __classPrivateFieldGet(this, _Buffer_buf, "f").byteLength - __classPrivateFieldGet(this, _Buffer_off, "f");
|
||||
}
|
||||
/** The read only capacity of the buffer's underlying byte slice, that is,
|
||||
* the total space allocated for the buffer's data. */
|
||||
get capacity() {
|
||||
return __classPrivateFieldGet(this, _Buffer_buf, "f").buffer.byteLength;
|
||||
}
|
||||
/** Discards all but the first `n` unread bytes from the buffer but
|
||||
* continues to use the same allocated storage. It throws if `n` is
|
||||
* negative or greater than the length of the buffer. */
|
||||
truncate(n) {
|
||||
if (n === 0) {
|
||||
this.reset();
|
||||
return;
|
||||
}
|
||||
if (n < 0 || n > this.length) {
|
||||
throw Error("bytes.Buffer: truncation out of range");
|
||||
}
|
||||
__classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_reslice).call(this, __classPrivateFieldGet(this, _Buffer_off, "f") + n);
|
||||
}
|
||||
reset() {
|
||||
__classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_reslice).call(this, 0);
|
||||
__classPrivateFieldSet(this, _Buffer_off, 0, "f");
|
||||
}
|
||||
/** Reads the next `p.length` bytes from the buffer or until the buffer is
|
||||
* drained. Returns the number of bytes read. If the buffer has no data to
|
||||
* return, the return is EOF (`null`). */
|
||||
readSync(p) {
|
||||
if (this.empty()) {
|
||||
// Buffer is empty, reset to recover space.
|
||||
this.reset();
|
||||
if (p.byteLength === 0) {
|
||||
// this edge case is tested in 'bufferReadEmptyAtEOF' test
|
||||
return 0;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
const nread = copy(__classPrivateFieldGet(this, _Buffer_buf, "f").subarray(__classPrivateFieldGet(this, _Buffer_off, "f")), p);
|
||||
__classPrivateFieldSet(this, _Buffer_off, __classPrivateFieldGet(this, _Buffer_off, "f") + nread, "f");
|
||||
return nread;
|
||||
}
|
||||
/** Reads the next `p.length` bytes from the buffer or until the buffer is
|
||||
* drained. Resolves to the number of bytes read. If the buffer has no
|
||||
* data to return, resolves to EOF (`null`).
|
||||
*
|
||||
* NOTE: This methods reads bytes synchronously; it's provided for
|
||||
* compatibility with `Reader` interfaces.
|
||||
*/
|
||||
read(p) {
|
||||
const rr = this.readSync(p);
|
||||
return Promise.resolve(rr);
|
||||
}
|
||||
writeSync(p) {
|
||||
const m = __classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_grow).call(this, p.byteLength);
|
||||
return copy(p, __classPrivateFieldGet(this, _Buffer_buf, "f"), m);
|
||||
}
|
||||
/** NOTE: This methods writes bytes synchronously; it's provided for
|
||||
* compatibility with `Writer` interface. */
|
||||
write(p) {
|
||||
const n = this.writeSync(p);
|
||||
return Promise.resolve(n);
|
||||
}
|
||||
/** Grows the buffer's capacity, if necessary, to guarantee space for
|
||||
* another `n` bytes. After `.grow(n)`, at least `n` bytes can be written to
|
||||
* the buffer without another allocation. If `n` is negative, `.grow()` will
|
||||
* throw. If the buffer can't grow it will throw an error.
|
||||
*
|
||||
* Based on Go Lang's
|
||||
* [Buffer.Grow](https://golang.org/pkg/bytes/#Buffer.Grow). */
|
||||
grow(n) {
|
||||
if (n < 0) {
|
||||
throw Error("Buffer.grow: negative count");
|
||||
}
|
||||
const m = __classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_grow).call(this, n);
|
||||
__classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_reslice).call(this, m);
|
||||
}
|
||||
/** Reads data from `r` until EOF (`null`) and appends it to the buffer,
|
||||
* growing the buffer as needed. It resolves to the number of bytes read.
|
||||
* If the buffer becomes too large, `.readFrom()` will reject with an error.
|
||||
*
|
||||
* Based on Go Lang's
|
||||
* [Buffer.ReadFrom](https://golang.org/pkg/bytes/#Buffer.ReadFrom). */
|
||||
async readFrom(r) {
|
||||
let n = 0;
|
||||
const tmp = new Uint8Array(MIN_READ);
|
||||
while (true) {
|
||||
const shouldGrow = this.capacity - this.length < MIN_READ;
|
||||
// read into tmp buffer if there's not enough room
|
||||
// otherwise read directly into the internal buffer
|
||||
const buf = shouldGrow
|
||||
? tmp
|
||||
: new Uint8Array(__classPrivateFieldGet(this, _Buffer_buf, "f").buffer, this.length);
|
||||
const nread = await r.read(buf);
|
||||
if (nread === null) {
|
||||
return n;
|
||||
}
|
||||
// write will grow if needed
|
||||
if (shouldGrow)
|
||||
this.writeSync(buf.subarray(0, nread));
|
||||
else
|
||||
__classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_reslice).call(this, this.length + nread);
|
||||
n += nread;
|
||||
}
|
||||
}
|
||||
/** Reads data from `r` until EOF (`null`) and appends it to the buffer,
|
||||
* growing the buffer as needed. It returns the number of bytes read. If the
|
||||
* buffer becomes too large, `.readFromSync()` will throw an error.
|
||||
*
|
||||
* Based on Go Lang's
|
||||
* [Buffer.ReadFrom](https://golang.org/pkg/bytes/#Buffer.ReadFrom). */
|
||||
readFromSync(r) {
|
||||
let n = 0;
|
||||
const tmp = new Uint8Array(MIN_READ);
|
||||
while (true) {
|
||||
const shouldGrow = this.capacity - this.length < MIN_READ;
|
||||
// read into tmp buffer if there's not enough room
|
||||
// otherwise read directly into the internal buffer
|
||||
const buf = shouldGrow
|
||||
? tmp
|
||||
: new Uint8Array(__classPrivateFieldGet(this, _Buffer_buf, "f").buffer, this.length);
|
||||
const nread = r.readSync(buf);
|
||||
if (nread === null) {
|
||||
return n;
|
||||
}
|
||||
// write will grow if needed
|
||||
if (shouldGrow)
|
||||
this.writeSync(buf.subarray(0, nread));
|
||||
else
|
||||
__classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_reslice).call(this, this.length + nread);
|
||||
n += nread;
|
||||
}
|
||||
}
|
||||
}
|
||||
_Buffer_buf = new WeakMap(), _Buffer_off = new WeakMap(), _Buffer_instances = new WeakSet(), _Buffer_tryGrowByReslice = function _Buffer_tryGrowByReslice(n) {
|
||||
const l = __classPrivateFieldGet(this, _Buffer_buf, "f").byteLength;
|
||||
if (n <= this.capacity - l) {
|
||||
__classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_reslice).call(this, l + n);
|
||||
return l;
|
||||
}
|
||||
return -1;
|
||||
}, _Buffer_reslice = function _Buffer_reslice(len) {
|
||||
assert(len <= __classPrivateFieldGet(this, _Buffer_buf, "f").buffer.byteLength);
|
||||
__classPrivateFieldSet(this, _Buffer_buf, new Uint8Array(__classPrivateFieldGet(this, _Buffer_buf, "f").buffer, 0, len), "f");
|
||||
}, _Buffer_grow = function _Buffer_grow(n) {
|
||||
const m = this.length;
|
||||
// If buffer is empty, reset to recover space.
|
||||
if (m === 0 && __classPrivateFieldGet(this, _Buffer_off, "f") !== 0) {
|
||||
this.reset();
|
||||
}
|
||||
// Fast: Try to grow by means of a reslice.
|
||||
const i = __classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_tryGrowByReslice).call(this, n);
|
||||
if (i >= 0) {
|
||||
return i;
|
||||
}
|
||||
const c = this.capacity;
|
||||
if (n <= Math.floor(c / 2) - m) {
|
||||
// We can slide things down instead of allocating a new
|
||||
// ArrayBuffer. We only need m+n <= c to slide, but
|
||||
// we instead let capacity get twice as large so we
|
||||
// don't spend all our time copying.
|
||||
copy(__classPrivateFieldGet(this, _Buffer_buf, "f").subarray(__classPrivateFieldGet(this, _Buffer_off, "f")), __classPrivateFieldGet(this, _Buffer_buf, "f"));
|
||||
}
|
||||
else if (c + n > MAX_SIZE) {
|
||||
throw new Error("The buffer cannot be grown beyond the maximum size.");
|
||||
}
|
||||
else {
|
||||
// Not enough space anywhere, we need to allocate.
|
||||
const buf = new Uint8Array(Math.min(2 * c + n, MAX_SIZE));
|
||||
copy(__classPrivateFieldGet(this, _Buffer_buf, "f").subarray(__classPrivateFieldGet(this, _Buffer_off, "f")), buf);
|
||||
__classPrivateFieldSet(this, _Buffer_buf, buf, "f");
|
||||
}
|
||||
// Restore this.#off and len(this.#buf).
|
||||
__classPrivateFieldSet(this, _Buffer_off, 0, "f");
|
||||
__classPrivateFieldGet(this, _Buffer_instances, "m", _Buffer_reslice).call(this, Math.min(m + n, MAX_SIZE));
|
||||
return m;
|
||||
};
|
||||
const DEFAULT_BUF_SIZE = 4096;
|
||||
const MIN_BUF_SIZE = 16;
|
||||
const MAX_CONSECUTIVE_EMPTY_READS = 100;
|
||||
const CR = "\r".charCodeAt(0);
|
||||
const LF = "\n".charCodeAt(0);
|
||||
export class BufferFullError extends Error {
|
||||
constructor(partial) {
|
||||
super("Buffer full");
|
||||
Object.defineProperty(this, "partial", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: partial
|
||||
});
|
||||
Object.defineProperty(this, "name", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: "BufferFullError"
|
||||
});
|
||||
}
|
||||
}
|
||||
export class PartialReadError extends Error {
|
||||
constructor() {
|
||||
super("Encountered UnexpectedEof, data only partially read");
|
||||
Object.defineProperty(this, "name", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: "PartialReadError"
|
||||
});
|
||||
Object.defineProperty(this, "partial", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
}
|
||||
}
|
||||
/** BufReader implements buffering for a Reader object. */
|
||||
export class BufReader {
|
||||
// private lastByte: number;
|
||||
// private lastCharSize: number;
|
||||
/** return new BufReader unless r is BufReader */
|
||||
static create(r, size = DEFAULT_BUF_SIZE) {
|
||||
return r instanceof BufReader ? r : new BufReader(r, size);
|
||||
}
|
||||
constructor(rd, size = DEFAULT_BUF_SIZE) {
|
||||
_BufReader_buf.set(this, void 0);
|
||||
_BufReader_rd.set(this, void 0); // Reader provided by caller.
|
||||
_BufReader_r.set(this, 0); // buf read position.
|
||||
_BufReader_w.set(this, 0); // buf write position.
|
||||
_BufReader_eof.set(this, false);
|
||||
// Reads a new chunk into the buffer.
|
||||
_BufReader_fill.set(this, async () => {
|
||||
// Slide existing data to beginning.
|
||||
if (__classPrivateFieldGet(this, _BufReader_r, "f") > 0) {
|
||||
__classPrivateFieldGet(this, _BufReader_buf, "f").copyWithin(0, __classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_w, "f"));
|
||||
__classPrivateFieldSet(this, _BufReader_w, __classPrivateFieldGet(this, _BufReader_w, "f") - __classPrivateFieldGet(this, _BufReader_r, "f"), "f");
|
||||
__classPrivateFieldSet(this, _BufReader_r, 0, "f");
|
||||
}
|
||||
if (__classPrivateFieldGet(this, _BufReader_w, "f") >= __classPrivateFieldGet(this, _BufReader_buf, "f").byteLength) {
|
||||
throw Error("bufio: tried to fill full buffer");
|
||||
}
|
||||
// Read new data: try a limited number of times.
|
||||
for (let i = MAX_CONSECUTIVE_EMPTY_READS; i > 0; i--) {
|
||||
const rr = await __classPrivateFieldGet(this, _BufReader_rd, "f").read(__classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_w, "f")));
|
||||
if (rr === null) {
|
||||
__classPrivateFieldSet(this, _BufReader_eof, true, "f");
|
||||
return;
|
||||
}
|
||||
assert(rr >= 0, "negative read");
|
||||
__classPrivateFieldSet(this, _BufReader_w, __classPrivateFieldGet(this, _BufReader_w, "f") + rr, "f");
|
||||
if (rr > 0) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
throw new Error(`No progress after ${MAX_CONSECUTIVE_EMPTY_READS} read() calls`);
|
||||
});
|
||||
_BufReader_reset.set(this, (buf, rd) => {
|
||||
__classPrivateFieldSet(this, _BufReader_buf, buf, "f");
|
||||
__classPrivateFieldSet(this, _BufReader_rd, rd, "f");
|
||||
__classPrivateFieldSet(this, _BufReader_eof, false, "f");
|
||||
// this.lastByte = -1;
|
||||
// this.lastCharSize = -1;
|
||||
});
|
||||
if (size < MIN_BUF_SIZE) {
|
||||
size = MIN_BUF_SIZE;
|
||||
}
|
||||
__classPrivateFieldGet(this, _BufReader_reset, "f").call(this, new Uint8Array(size), rd);
|
||||
}
|
||||
/** Returns the size of the underlying buffer in bytes. */
|
||||
size() {
|
||||
return __classPrivateFieldGet(this, _BufReader_buf, "f").byteLength;
|
||||
}
|
||||
buffered() {
|
||||
return __classPrivateFieldGet(this, _BufReader_w, "f") - __classPrivateFieldGet(this, _BufReader_r, "f");
|
||||
}
|
||||
/** Discards any buffered data, resets all state, and switches
|
||||
* the buffered reader to read from r.
|
||||
*/
|
||||
reset(r) {
|
||||
__classPrivateFieldGet(this, _BufReader_reset, "f").call(this, __classPrivateFieldGet(this, _BufReader_buf, "f"), r);
|
||||
}
|
||||
/** reads data into p.
|
||||
* It returns the number of bytes read into p.
|
||||
* The bytes are taken from at most one Read on the underlying Reader,
|
||||
* hence n may be less than len(p).
|
||||
* To read exactly len(p) bytes, use io.ReadFull(b, p).
|
||||
*/
|
||||
async read(p) {
|
||||
let rr = p.byteLength;
|
||||
if (p.byteLength === 0)
|
||||
return rr;
|
||||
if (__classPrivateFieldGet(this, _BufReader_r, "f") === __classPrivateFieldGet(this, _BufReader_w, "f")) {
|
||||
if (p.byteLength >= __classPrivateFieldGet(this, _BufReader_buf, "f").byteLength) {
|
||||
// Large read, empty buffer.
|
||||
// Read directly into p to avoid copy.
|
||||
const rr = await __classPrivateFieldGet(this, _BufReader_rd, "f").read(p);
|
||||
const nread = rr ?? 0;
|
||||
assert(nread >= 0, "negative read");
|
||||
// if (rr.nread > 0) {
|
||||
// this.lastByte = p[rr.nread - 1];
|
||||
// this.lastCharSize = -1;
|
||||
// }
|
||||
return rr;
|
||||
}
|
||||
// One read.
|
||||
// Do not use this.fill, which will loop.
|
||||
__classPrivateFieldSet(this, _BufReader_r, 0, "f");
|
||||
__classPrivateFieldSet(this, _BufReader_w, 0, "f");
|
||||
rr = await __classPrivateFieldGet(this, _BufReader_rd, "f").read(__classPrivateFieldGet(this, _BufReader_buf, "f"));
|
||||
if (rr === 0 || rr === null)
|
||||
return rr;
|
||||
assert(rr >= 0, "negative read");
|
||||
__classPrivateFieldSet(this, _BufReader_w, __classPrivateFieldGet(this, _BufReader_w, "f") + rr, "f");
|
||||
}
|
||||
// copy as much as we can
|
||||
const copied = copy(__classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_w, "f")), p, 0);
|
||||
__classPrivateFieldSet(this, _BufReader_r, __classPrivateFieldGet(this, _BufReader_r, "f") + copied, "f");
|
||||
// this.lastByte = this.buf[this.r - 1];
|
||||
// this.lastCharSize = -1;
|
||||
return copied;
|
||||
}
|
||||
/** reads exactly `p.length` bytes into `p`.
|
||||
*
|
||||
* If successful, `p` is returned.
|
||||
*
|
||||
* If the end of the underlying stream has been reached, and there are no more
|
||||
* bytes available in the buffer, `readFull()` returns `null` instead.
|
||||
*
|
||||
* An error is thrown if some bytes could be read, but not enough to fill `p`
|
||||
* entirely before the underlying stream reported an error or EOF. Any error
|
||||
* thrown will have a `partial` property that indicates the slice of the
|
||||
* buffer that has been successfully filled with data.
|
||||
*
|
||||
* Ported from https://golang.org/pkg/io/#ReadFull
|
||||
*/
|
||||
async readFull(p) {
|
||||
let bytesRead = 0;
|
||||
while (bytesRead < p.length) {
|
||||
try {
|
||||
const rr = await this.read(p.subarray(bytesRead));
|
||||
if (rr === null) {
|
||||
if (bytesRead === 0) {
|
||||
return null;
|
||||
}
|
||||
else {
|
||||
throw new PartialReadError();
|
||||
}
|
||||
}
|
||||
bytesRead += rr;
|
||||
}
|
||||
catch (err) {
|
||||
if (err instanceof PartialReadError) {
|
||||
err.partial = p.subarray(0, bytesRead);
|
||||
}
|
||||
else if (err instanceof Error) {
|
||||
const e = new PartialReadError();
|
||||
e.partial = p.subarray(0, bytesRead);
|
||||
e.stack = err.stack;
|
||||
e.message = err.message;
|
||||
e.cause = err.cause;
|
||||
throw err;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
return p;
|
||||
}
|
||||
/** Returns the next byte [0, 255] or `null`. */
|
||||
async readByte() {
|
||||
var _a;
|
||||
while (__classPrivateFieldGet(this, _BufReader_r, "f") === __classPrivateFieldGet(this, _BufReader_w, "f")) {
|
||||
if (__classPrivateFieldGet(this, _BufReader_eof, "f"))
|
||||
return null;
|
||||
await __classPrivateFieldGet(this, _BufReader_fill, "f").call(this); // buffer is empty.
|
||||
}
|
||||
const c = __classPrivateFieldGet(this, _BufReader_buf, "f")[__classPrivateFieldGet(this, _BufReader_r, "f")];
|
||||
__classPrivateFieldSet(this, _BufReader_r, (_a = __classPrivateFieldGet(this, _BufReader_r, "f"), _a++, _a), "f");
|
||||
// this.lastByte = c;
|
||||
return c;
|
||||
}
|
||||
/** readString() reads until the first occurrence of delim in the input,
|
||||
* returning a string containing the data up to and including the delimiter.
|
||||
* If ReadString encounters an error before finding a delimiter,
|
||||
* it returns the data read before the error and the error itself
|
||||
* (often `null`).
|
||||
* ReadString returns err != nil if and only if the returned data does not end
|
||||
* in delim.
|
||||
* For simple uses, a Scanner may be more convenient.
|
||||
*/
|
||||
async readString(delim) {
|
||||
if (delim.length !== 1) {
|
||||
throw new Error("Delimiter should be a single character");
|
||||
}
|
||||
const buffer = await this.readSlice(delim.charCodeAt(0));
|
||||
if (buffer === null)
|
||||
return null;
|
||||
return new TextDecoder().decode(buffer);
|
||||
}
|
||||
/** `readLine()` is a low-level line-reading primitive. Most callers should
|
||||
* use `readString('\n')` instead or use a Scanner.
|
||||
*
|
||||
* `readLine()` tries to return a single line, not including the end-of-line
|
||||
* bytes. If the line was too long for the buffer then `more` is set and the
|
||||
* beginning of the line is returned. The rest of the line will be returned
|
||||
* from future calls. `more` will be false when returning the last fragment
|
||||
* of the line. The returned buffer is only valid until the next call to
|
||||
* `readLine()`.
|
||||
*
|
||||
* The text returned from ReadLine does not include the line end ("\r\n" or
|
||||
* "\n").
|
||||
*
|
||||
* When the end of the underlying stream is reached, the final bytes in the
|
||||
* stream are returned. No indication or error is given if the input ends
|
||||
* without a final line end. When there are no more trailing bytes to read,
|
||||
* `readLine()` returns `null`.
|
||||
*
|
||||
* Calling `unreadByte()` after `readLine()` will always unread the last byte
|
||||
* read (possibly a character belonging to the line end) even if that byte is
|
||||
* not part of the line returned by `readLine()`.
|
||||
*/
|
||||
async readLine() {
|
||||
var _a;
|
||||
let line = null;
|
||||
try {
|
||||
line = await this.readSlice(LF);
|
||||
}
|
||||
catch (err) {
|
||||
if (err instanceof dntShim.Deno.errors.BadResource) {
|
||||
throw err;
|
||||
}
|
||||
let partial;
|
||||
if (err instanceof PartialReadError) {
|
||||
partial = err.partial;
|
||||
assert(partial instanceof Uint8Array, "bufio: caught error from `readSlice()` without `partial` property");
|
||||
}
|
||||
// Don't throw if `readSlice()` failed with `BufferFullError`, instead we
|
||||
// just return whatever is available and set the `more` flag.
|
||||
if (!(err instanceof BufferFullError)) {
|
||||
throw err;
|
||||
}
|
||||
partial = err.partial;
|
||||
// Handle the case where "\r\n" straddles the buffer.
|
||||
if (!__classPrivateFieldGet(this, _BufReader_eof, "f") && partial &&
|
||||
partial.byteLength > 0 &&
|
||||
partial[partial.byteLength - 1] === CR) {
|
||||
// Put the '\r' back on buf and drop it from line.
|
||||
// Let the next call to ReadLine check for "\r\n".
|
||||
assert(__classPrivateFieldGet(this, _BufReader_r, "f") > 0, "bufio: tried to rewind past start of buffer");
|
||||
__classPrivateFieldSet(this, _BufReader_r, (_a = __classPrivateFieldGet(this, _BufReader_r, "f"), _a--, _a), "f");
|
||||
partial = partial.subarray(0, partial.byteLength - 1);
|
||||
}
|
||||
if (partial) {
|
||||
return { line: partial, more: !__classPrivateFieldGet(this, _BufReader_eof, "f") };
|
||||
}
|
||||
}
|
||||
if (line === null) {
|
||||
return null;
|
||||
}
|
||||
if (line.byteLength === 0) {
|
||||
return { line, more: false };
|
||||
}
|
||||
if (line[line.byteLength - 1] == LF) {
|
||||
let drop = 1;
|
||||
if (line.byteLength > 1 && line[line.byteLength - 2] === CR) {
|
||||
drop = 2;
|
||||
}
|
||||
line = line.subarray(0, line.byteLength - drop);
|
||||
}
|
||||
return { line, more: false };
|
||||
}
|
||||
/** `readSlice()` reads until the first occurrence of `delim` in the input,
|
||||
* returning a slice pointing at the bytes in the buffer. The bytes stop
|
||||
* being valid at the next read.
|
||||
*
|
||||
* If `readSlice()` encounters an error before finding a delimiter, or the
|
||||
* buffer fills without finding a delimiter, it throws an error with a
|
||||
* `partial` property that contains the entire buffer.
|
||||
*
|
||||
* If `readSlice()` encounters the end of the underlying stream and there are
|
||||
* any bytes left in the buffer, the rest of the buffer is returned. In other
|
||||
* words, EOF is always treated as a delimiter. Once the buffer is empty,
|
||||
* it returns `null`.
|
||||
*
|
||||
* Because the data returned from `readSlice()` will be overwritten by the
|
||||
* next I/O operation, most clients should use `readString()` instead.
|
||||
*/
|
||||
async readSlice(delim) {
|
||||
let s = 0; // search start index
|
||||
let slice;
|
||||
while (true) {
|
||||
// Search buffer.
|
||||
let i = __classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f") + s, __classPrivateFieldGet(this, _BufReader_w, "f")).indexOf(delim);
|
||||
if (i >= 0) {
|
||||
i += s;
|
||||
slice = __classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_r, "f") + i + 1);
|
||||
__classPrivateFieldSet(this, _BufReader_r, __classPrivateFieldGet(this, _BufReader_r, "f") + (i + 1), "f");
|
||||
break;
|
||||
}
|
||||
// EOF?
|
||||
if (__classPrivateFieldGet(this, _BufReader_eof, "f")) {
|
||||
if (__classPrivateFieldGet(this, _BufReader_r, "f") === __classPrivateFieldGet(this, _BufReader_w, "f")) {
|
||||
return null;
|
||||
}
|
||||
slice = __classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_w, "f"));
|
||||
__classPrivateFieldSet(this, _BufReader_r, __classPrivateFieldGet(this, _BufReader_w, "f"), "f");
|
||||
break;
|
||||
}
|
||||
// Buffer full?
|
||||
if (this.buffered() >= __classPrivateFieldGet(this, _BufReader_buf, "f").byteLength) {
|
||||
__classPrivateFieldSet(this, _BufReader_r, __classPrivateFieldGet(this, _BufReader_w, "f"), "f");
|
||||
// #4521 The internal buffer should not be reused across reads because it causes corruption of data.
|
||||
const oldbuf = __classPrivateFieldGet(this, _BufReader_buf, "f");
|
||||
const newbuf = __classPrivateFieldGet(this, _BufReader_buf, "f").slice(0);
|
||||
__classPrivateFieldSet(this, _BufReader_buf, newbuf, "f");
|
||||
throw new BufferFullError(oldbuf);
|
||||
}
|
||||
s = __classPrivateFieldGet(this, _BufReader_w, "f") - __classPrivateFieldGet(this, _BufReader_r, "f"); // do not rescan area we scanned before
|
||||
// Buffer is not full.
|
||||
try {
|
||||
await __classPrivateFieldGet(this, _BufReader_fill, "f").call(this);
|
||||
}
|
||||
catch (err) {
|
||||
if (err instanceof PartialReadError) {
|
||||
err.partial = slice;
|
||||
}
|
||||
else if (err instanceof Error) {
|
||||
const e = new PartialReadError();
|
||||
e.partial = slice;
|
||||
e.stack = err.stack;
|
||||
e.message = err.message;
|
||||
e.cause = err.cause;
|
||||
throw err;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
// Handle last byte, if any.
|
||||
// const i = slice.byteLength - 1;
|
||||
// if (i >= 0) {
|
||||
// this.lastByte = slice[i];
|
||||
// this.lastCharSize = -1
|
||||
// }
|
||||
return slice;
|
||||
}
|
||||
/** `peek()` returns the next `n` bytes without advancing the reader. The
|
||||
* bytes stop being valid at the next read call.
|
||||
*
|
||||
* When the end of the underlying stream is reached, but there are unread
|
||||
* bytes left in the buffer, those bytes are returned. If there are no bytes
|
||||
* left in the buffer, it returns `null`.
|
||||
*
|
||||
* If an error is encountered before `n` bytes are available, `peek()` throws
|
||||
* an error with the `partial` property set to a slice of the buffer that
|
||||
* contains the bytes that were available before the error occurred.
|
||||
*/
|
||||
async peek(n) {
|
||||
if (n < 0) {
|
||||
throw Error("negative count");
|
||||
}
|
||||
let avail = __classPrivateFieldGet(this, _BufReader_w, "f") - __classPrivateFieldGet(this, _BufReader_r, "f");
|
||||
while (avail < n && avail < __classPrivateFieldGet(this, _BufReader_buf, "f").byteLength && !__classPrivateFieldGet(this, _BufReader_eof, "f")) {
|
||||
try {
|
||||
await __classPrivateFieldGet(this, _BufReader_fill, "f").call(this);
|
||||
}
|
||||
catch (err) {
|
||||
if (err instanceof PartialReadError) {
|
||||
err.partial = __classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_w, "f"));
|
||||
}
|
||||
else if (err instanceof Error) {
|
||||
const e = new PartialReadError();
|
||||
e.partial = __classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_w, "f"));
|
||||
e.stack = err.stack;
|
||||
e.message = err.message;
|
||||
e.cause = err.cause;
|
||||
throw err;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
avail = __classPrivateFieldGet(this, _BufReader_w, "f") - __classPrivateFieldGet(this, _BufReader_r, "f");
|
||||
}
|
||||
if (avail === 0 && __classPrivateFieldGet(this, _BufReader_eof, "f")) {
|
||||
return null;
|
||||
}
|
||||
else if (avail < n && __classPrivateFieldGet(this, _BufReader_eof, "f")) {
|
||||
return __classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_r, "f") + avail);
|
||||
}
|
||||
else if (avail < n) {
|
||||
throw new BufferFullError(__classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_w, "f")));
|
||||
}
|
||||
return __classPrivateFieldGet(this, _BufReader_buf, "f").subarray(__classPrivateFieldGet(this, _BufReader_r, "f"), __classPrivateFieldGet(this, _BufReader_r, "f") + n);
|
||||
}
|
||||
}
|
||||
_BufReader_buf = new WeakMap(), _BufReader_rd = new WeakMap(), _BufReader_r = new WeakMap(), _BufReader_w = new WeakMap(), _BufReader_eof = new WeakMap(), _BufReader_fill = new WeakMap(), _BufReader_reset = new WeakMap();
|
||||
class AbstractBufBase {
|
||||
constructor(buf) {
|
||||
Object.defineProperty(this, "buf", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: void 0
|
||||
});
|
||||
Object.defineProperty(this, "usedBufferBytes", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: 0
|
||||
});
|
||||
Object.defineProperty(this, "err", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: null
|
||||
});
|
||||
this.buf = buf;
|
||||
}
|
||||
/** Size returns the size of the underlying buffer in bytes. */
|
||||
size() {
|
||||
return this.buf.byteLength;
|
||||
}
|
||||
/** Returns how many bytes are unused in the buffer. */
|
||||
available() {
|
||||
return this.buf.byteLength - this.usedBufferBytes;
|
||||
}
|
||||
/** buffered returns the number of bytes that have been written into the
|
||||
* current buffer.
|
||||
*/
|
||||
buffered() {
|
||||
return this.usedBufferBytes;
|
||||
}
|
||||
}
|
||||
/** BufWriter implements buffering for an deno.Writer object.
|
||||
* If an error occurs writing to a Writer, no more data will be
|
||||
* accepted and all subsequent writes, and flush(), will return the error.
|
||||
* After all data has been written, the client should call the
|
||||
* flush() method to guarantee all data has been forwarded to
|
||||
* the underlying deno.Writer.
|
||||
*/
|
||||
export class BufWriter extends AbstractBufBase {
|
||||
/** return new BufWriter unless writer is BufWriter */
|
||||
static create(writer, size = DEFAULT_BUF_SIZE) {
|
||||
return writer instanceof BufWriter ? writer : new BufWriter(writer, size);
|
||||
}
|
||||
constructor(writer, size = DEFAULT_BUF_SIZE) {
|
||||
super(new Uint8Array(size <= 0 ? DEFAULT_BUF_SIZE : size));
|
||||
_BufWriter_writer.set(this, void 0);
|
||||
__classPrivateFieldSet(this, _BufWriter_writer, writer, "f");
|
||||
}
|
||||
/** Discards any unflushed buffered data, clears any error, and
|
||||
* resets buffer to write its output to w.
|
||||
*/
|
||||
reset(w) {
|
||||
this.err = null;
|
||||
this.usedBufferBytes = 0;
|
||||
__classPrivateFieldSet(this, _BufWriter_writer, w, "f");
|
||||
}
|
||||
/** Flush writes any buffered data to the underlying io.Writer. */
|
||||
async flush() {
|
||||
if (this.err !== null)
|
||||
throw this.err;
|
||||
if (this.usedBufferBytes === 0)
|
||||
return;
|
||||
try {
|
||||
const p = this.buf.subarray(0, this.usedBufferBytes);
|
||||
let nwritten = 0;
|
||||
while (nwritten < p.length) {
|
||||
nwritten += await __classPrivateFieldGet(this, _BufWriter_writer, "f").write(p.subarray(nwritten));
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
this.err = e;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
this.buf = new Uint8Array(this.buf.length);
|
||||
this.usedBufferBytes = 0;
|
||||
}
|
||||
/** Writes the contents of `data` into the buffer. If the contents won't fully
|
||||
* fit into the buffer, those bytes that can are copied into the buffer, the
|
||||
* buffer is the flushed to the writer and the remaining bytes are copied into
|
||||
* the now empty buffer.
|
||||
*
|
||||
* @return the number of bytes written to the buffer.
|
||||
*/
|
||||
async write(data) {
|
||||
if (this.err !== null)
|
||||
throw this.err;
|
||||
if (data.length === 0)
|
||||
return 0;
|
||||
let totalBytesWritten = 0;
|
||||
let numBytesWritten = 0;
|
||||
while (data.byteLength > this.available()) {
|
||||
if (this.buffered() === 0) {
|
||||
// Large write, empty buffer.
|
||||
// Write directly from data to avoid copy.
|
||||
try {
|
||||
numBytesWritten = await __classPrivateFieldGet(this, _BufWriter_writer, "f").write(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
this.err = e;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
else {
|
||||
numBytesWritten = copy(data, this.buf, this.usedBufferBytes);
|
||||
this.usedBufferBytes += numBytesWritten;
|
||||
await this.flush();
|
||||
}
|
||||
totalBytesWritten += numBytesWritten;
|
||||
data = data.subarray(numBytesWritten);
|
||||
}
|
||||
numBytesWritten = copy(data, this.buf, this.usedBufferBytes);
|
||||
this.usedBufferBytes += numBytesWritten;
|
||||
totalBytesWritten += numBytesWritten;
|
||||
return totalBytesWritten;
|
||||
}
|
||||
}
|
||||
_BufWriter_writer = new WeakMap();
|
||||
/** BufWriterSync implements buffering for a deno.WriterSync object.
|
||||
* If an error occurs writing to a WriterSync, no more data will be
|
||||
* accepted and all subsequent writes, and flush(), will return the error.
|
||||
* After all data has been written, the client should call the
|
||||
* flush() method to guarantee all data has been forwarded to
|
||||
* the underlying deno.WriterSync.
|
||||
*/
|
||||
export class BufWriterSync extends AbstractBufBase {
|
||||
/** return new BufWriterSync unless writer is BufWriterSync */
|
||||
static create(writer, size = DEFAULT_BUF_SIZE) {
|
||||
return writer instanceof BufWriterSync
|
||||
? writer
|
||||
: new BufWriterSync(writer, size);
|
||||
}
|
||||
constructor(writer, size = DEFAULT_BUF_SIZE) {
|
||||
super(new Uint8Array(size <= 0 ? DEFAULT_BUF_SIZE : size));
|
||||
_BufWriterSync_writer.set(this, void 0);
|
||||
__classPrivateFieldSet(this, _BufWriterSync_writer, writer, "f");
|
||||
}
|
||||
/** Discards any unflushed buffered data, clears any error, and
|
||||
* resets buffer to write its output to w.
|
||||
*/
|
||||
reset(w) {
|
||||
this.err = null;
|
||||
this.usedBufferBytes = 0;
|
||||
__classPrivateFieldSet(this, _BufWriterSync_writer, w, "f");
|
||||
}
|
||||
/** Flush writes any buffered data to the underlying io.WriterSync. */
|
||||
flush() {
|
||||
if (this.err !== null)
|
||||
throw this.err;
|
||||
if (this.usedBufferBytes === 0)
|
||||
return;
|
||||
try {
|
||||
const p = this.buf.subarray(0, this.usedBufferBytes);
|
||||
let nwritten = 0;
|
||||
while (nwritten < p.length) {
|
||||
nwritten += __classPrivateFieldGet(this, _BufWriterSync_writer, "f").writeSync(p.subarray(nwritten));
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
this.err = e;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
this.buf = new Uint8Array(this.buf.length);
|
||||
this.usedBufferBytes = 0;
|
||||
}
|
||||
/** Writes the contents of `data` into the buffer. If the contents won't fully
|
||||
* fit into the buffer, those bytes that can are copied into the buffer, the
|
||||
* buffer is the flushed to the writer and the remaining bytes are copied into
|
||||
* the now empty buffer.
|
||||
*
|
||||
* @return the number of bytes written to the buffer.
|
||||
*/
|
||||
writeSync(data) {
|
||||
if (this.err !== null)
|
||||
throw this.err;
|
||||
if (data.length === 0)
|
||||
return 0;
|
||||
let totalBytesWritten = 0;
|
||||
let numBytesWritten = 0;
|
||||
while (data.byteLength > this.available()) {
|
||||
if (this.buffered() === 0) {
|
||||
// Large write, empty buffer.
|
||||
// Write directly from data to avoid copy.
|
||||
try {
|
||||
numBytesWritten = __classPrivateFieldGet(this, _BufWriterSync_writer, "f").writeSync(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
this.err = e;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
else {
|
||||
numBytesWritten = copy(data, this.buf, this.usedBufferBytes);
|
||||
this.usedBufferBytes += numBytesWritten;
|
||||
this.flush();
|
||||
}
|
||||
totalBytesWritten += numBytesWritten;
|
||||
data = data.subarray(numBytesWritten);
|
||||
}
|
||||
numBytesWritten = copy(data, this.buf, this.usedBufferBytes);
|
||||
this.usedBufferBytes += numBytesWritten;
|
||||
totalBytesWritten += numBytesWritten;
|
||||
return totalBytesWritten;
|
||||
}
|
||||
}
|
||||
_BufWriterSync_writer = new WeakMap();
|
||||
/** Generate longest proper prefix which is also suffix array. */
|
||||
function createLPS(pat) {
|
||||
const lps = new Uint8Array(pat.length);
|
||||
lps[0] = 0;
|
||||
let prefixEnd = 0;
|
||||
let i = 1;
|
||||
while (i < lps.length) {
|
||||
if (pat[i] == pat[prefixEnd]) {
|
||||
prefixEnd++;
|
||||
lps[i] = prefixEnd;
|
||||
i++;
|
||||
}
|
||||
else if (prefixEnd === 0) {
|
||||
lps[i] = 0;
|
||||
i++;
|
||||
}
|
||||
else {
|
||||
prefixEnd = lps[prefixEnd - 1];
|
||||
}
|
||||
}
|
||||
return lps;
|
||||
}
|
||||
/** Read delimited bytes from a Reader. */
|
||||
export async function* readDelim(reader, delim) {
|
||||
// Avoid unicode problems
|
||||
const delimLen = delim.length;
|
||||
const delimLPS = createLPS(delim);
|
||||
const chunks = new BytesList();
|
||||
const bufSize = Math.max(1024, delimLen + 1);
|
||||
// Modified KMP
|
||||
let inspectIndex = 0;
|
||||
let matchIndex = 0;
|
||||
while (true) {
|
||||
const inspectArr = new Uint8Array(bufSize);
|
||||
const result = await reader.read(inspectArr);
|
||||
if (result === null) {
|
||||
// Yield last chunk.
|
||||
yield chunks.concat();
|
||||
return;
|
||||
}
|
||||
else if (result < 0) {
|
||||
// Discard all remaining and silently fail.
|
||||
return;
|
||||
}
|
||||
chunks.add(inspectArr, 0, result);
|
||||
let localIndex = 0;
|
||||
while (inspectIndex < chunks.size()) {
|
||||
if (inspectArr[localIndex] === delim[matchIndex]) {
|
||||
inspectIndex++;
|
||||
localIndex++;
|
||||
matchIndex++;
|
||||
if (matchIndex === delimLen) {
|
||||
// Full match
|
||||
const matchEnd = inspectIndex - delimLen;
|
||||
const readyBytes = chunks.slice(0, matchEnd);
|
||||
yield readyBytes;
|
||||
// Reset match, different from KMP.
|
||||
chunks.shift(inspectIndex);
|
||||
inspectIndex = 0;
|
||||
matchIndex = 0;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (matchIndex === 0) {
|
||||
inspectIndex++;
|
||||
localIndex++;
|
||||
}
|
||||
else {
|
||||
matchIndex = delimLPS[matchIndex - 1];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/** Read delimited strings from a Reader. */
|
||||
export async function* readStringDelim(reader, delim, decoderOpts) {
|
||||
const encoder = new TextEncoder();
|
||||
const decoder = new TextDecoder(decoderOpts?.encoding, decoderOpts);
|
||||
for await (const chunk of readDelim(reader, encoder.encode(delim))) {
|
||||
yield decoder.decode(chunk);
|
||||
}
|
||||
}
|
||||
/** Read strings line-by-line from a Reader. */
|
||||
export async function* readLines(reader, decoderOpts) {
|
||||
const bufReader = new BufReader(reader);
|
||||
let chunks = [];
|
||||
const decoder = new TextDecoder(decoderOpts?.encoding, decoderOpts);
|
||||
while (true) {
|
||||
const res = await bufReader.readLine();
|
||||
if (!res) {
|
||||
if (chunks.length > 0) {
|
||||
yield decoder.decode(concat(...chunks));
|
||||
}
|
||||
break;
|
||||
}
|
||||
chunks.push(res.line);
|
||||
if (!res.more) {
|
||||
yield decoder.decode(concat(...chunks));
|
||||
chunks = [];
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,427 +0,0 @@
|
||||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
// A module to print ANSI terminal colors. Inspired by chalk, kleur, and colors
|
||||
// on npm.
|
||||
//
|
||||
// ```
|
||||
// import { bgBlue, red, bold } from "https://deno.land/std/fmt/colors.ts";
|
||||
// console.log(bgBlue(red(bold("Hello world!"))));
|
||||
// ```
|
||||
//
|
||||
// This module supports `NO_COLOR` environmental variable disabling any coloring
|
||||
// if `NO_COLOR` is set.
|
||||
//
|
||||
// This module is browser compatible.
|
||||
import * as dntShim from "../../../../_dnt.test_shims.js";
|
||||
const noColor = dntShim.dntGlobalThis.Deno?.noColor ?? true;
|
||||
let enabled = !noColor;
|
||||
/**
|
||||
* Set changing text color to enabled or disabled
|
||||
* @param value
|
||||
*/
|
||||
export function setColorEnabled(value) {
|
||||
if (noColor) {
|
||||
return;
|
||||
}
|
||||
enabled = value;
|
||||
}
|
||||
/** Get whether text color change is enabled or disabled. */
|
||||
export function getColorEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
/**
|
||||
* Builds color code
|
||||
* @param open
|
||||
* @param close
|
||||
*/
|
||||
function code(open, close) {
|
||||
return {
|
||||
open: `\x1b[${open.join(";")}m`,
|
||||
close: `\x1b[${close}m`,
|
||||
regexp: new RegExp(`\\x1b\\[${close}m`, "g"),
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Applies color and background based on color code and its associated text
|
||||
* @param str text to apply color settings to
|
||||
* @param code color code to apply
|
||||
*/
|
||||
function run(str, code) {
|
||||
return enabled
|
||||
? `${code.open}${str.replace(code.regexp, code.open)}${code.close}`
|
||||
: str;
|
||||
}
|
||||
/**
|
||||
* Reset the text modified
|
||||
* @param str text to reset
|
||||
*/
|
||||
export function reset(str) {
|
||||
return run(str, code([0], 0));
|
||||
}
|
||||
/**
|
||||
* Make the text bold.
|
||||
* @param str text to make bold
|
||||
*/
|
||||
export function bold(str) {
|
||||
return run(str, code([1], 22));
|
||||
}
|
||||
/**
|
||||
* The text emits only a small amount of light.
|
||||
* @param str text to dim
|
||||
*/
|
||||
export function dim(str) {
|
||||
return run(str, code([2], 22));
|
||||
}
|
||||
/**
|
||||
* Make the text italic.
|
||||
* @param str text to make italic
|
||||
*/
|
||||
export function italic(str) {
|
||||
return run(str, code([3], 23));
|
||||
}
|
||||
/**
|
||||
* Make the text underline.
|
||||
* @param str text to underline
|
||||
*/
|
||||
export function underline(str) {
|
||||
return run(str, code([4], 24));
|
||||
}
|
||||
/**
|
||||
* Invert background color and text color.
|
||||
* @param str text to invert its color
|
||||
*/
|
||||
export function inverse(str) {
|
||||
return run(str, code([7], 27));
|
||||
}
|
||||
/**
|
||||
* Make the text hidden.
|
||||
* @param str text to hide
|
||||
*/
|
||||
export function hidden(str) {
|
||||
return run(str, code([8], 28));
|
||||
}
|
||||
/**
|
||||
* Put horizontal line through the center of the text.
|
||||
* @param str text to strike through
|
||||
*/
|
||||
export function strikethrough(str) {
|
||||
return run(str, code([9], 29));
|
||||
}
|
||||
/**
|
||||
* Set text color to black.
|
||||
* @param str text to make black
|
||||
*/
|
||||
export function black(str) {
|
||||
return run(str, code([30], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to red.
|
||||
* @param str text to make red
|
||||
*/
|
||||
export function red(str) {
|
||||
return run(str, code([31], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to green.
|
||||
* @param str text to make green
|
||||
*/
|
||||
export function green(str) {
|
||||
return run(str, code([32], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to yellow.
|
||||
* @param str text to make yellow
|
||||
*/
|
||||
export function yellow(str) {
|
||||
return run(str, code([33], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to blue.
|
||||
* @param str text to make blue
|
||||
*/
|
||||
export function blue(str) {
|
||||
return run(str, code([34], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to magenta.
|
||||
* @param str text to make magenta
|
||||
*/
|
||||
export function magenta(str) {
|
||||
return run(str, code([35], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to cyan.
|
||||
* @param str text to make cyan
|
||||
*/
|
||||
export function cyan(str) {
|
||||
return run(str, code([36], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to white.
|
||||
* @param str text to make white
|
||||
*/
|
||||
export function white(str) {
|
||||
return run(str, code([37], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to gray.
|
||||
* @param str text to make gray
|
||||
*/
|
||||
export function gray(str) {
|
||||
return brightBlack(str);
|
||||
}
|
||||
/**
|
||||
* Set text color to bright black.
|
||||
* @param str text to make bright-black
|
||||
*/
|
||||
export function brightBlack(str) {
|
||||
return run(str, code([90], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to bright red.
|
||||
* @param str text to make bright-red
|
||||
*/
|
||||
export function brightRed(str) {
|
||||
return run(str, code([91], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to bright green.
|
||||
* @param str text to make bright-green
|
||||
*/
|
||||
export function brightGreen(str) {
|
||||
return run(str, code([92], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to bright yellow.
|
||||
* @param str text to make bright-yellow
|
||||
*/
|
||||
export function brightYellow(str) {
|
||||
return run(str, code([93], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to bright blue.
|
||||
* @param str text to make bright-blue
|
||||
*/
|
||||
export function brightBlue(str) {
|
||||
return run(str, code([94], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to bright magenta.
|
||||
* @param str text to make bright-magenta
|
||||
*/
|
||||
export function brightMagenta(str) {
|
||||
return run(str, code([95], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to bright cyan.
|
||||
* @param str text to make bright-cyan
|
||||
*/
|
||||
export function brightCyan(str) {
|
||||
return run(str, code([96], 39));
|
||||
}
|
||||
/**
|
||||
* Set text color to bright white.
|
||||
* @param str text to make bright-white
|
||||
*/
|
||||
export function brightWhite(str) {
|
||||
return run(str, code([97], 39));
|
||||
}
|
||||
/**
|
||||
* Set background color to black.
|
||||
* @param str text to make its background black
|
||||
*/
|
||||
export function bgBlack(str) {
|
||||
return run(str, code([40], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to red.
|
||||
* @param str text to make its background red
|
||||
*/
|
||||
export function bgRed(str) {
|
||||
return run(str, code([41], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to green.
|
||||
* @param str text to make its background green
|
||||
*/
|
||||
export function bgGreen(str) {
|
||||
return run(str, code([42], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to yellow.
|
||||
* @param str text to make its background yellow
|
||||
*/
|
||||
export function bgYellow(str) {
|
||||
return run(str, code([43], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to blue.
|
||||
* @param str text to make its background blue
|
||||
*/
|
||||
export function bgBlue(str) {
|
||||
return run(str, code([44], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to magenta.
|
||||
* @param str text to make its background magenta
|
||||
*/
|
||||
export function bgMagenta(str) {
|
||||
return run(str, code([45], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to cyan.
|
||||
* @param str text to make its background cyan
|
||||
*/
|
||||
export function bgCyan(str) {
|
||||
return run(str, code([46], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to white.
|
||||
* @param str text to make its background white
|
||||
*/
|
||||
export function bgWhite(str) {
|
||||
return run(str, code([47], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to bright black.
|
||||
* @param str text to make its background bright-black
|
||||
*/
|
||||
export function bgBrightBlack(str) {
|
||||
return run(str, code([100], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to bright red.
|
||||
* @param str text to make its background bright-red
|
||||
*/
|
||||
export function bgBrightRed(str) {
|
||||
return run(str, code([101], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to bright green.
|
||||
* @param str text to make its background bright-green
|
||||
*/
|
||||
export function bgBrightGreen(str) {
|
||||
return run(str, code([102], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to bright yellow.
|
||||
* @param str text to make its background bright-yellow
|
||||
*/
|
||||
export function bgBrightYellow(str) {
|
||||
return run(str, code([103], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to bright blue.
|
||||
* @param str text to make its background bright-blue
|
||||
*/
|
||||
export function bgBrightBlue(str) {
|
||||
return run(str, code([104], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to bright magenta.
|
||||
* @param str text to make its background bright-magenta
|
||||
*/
|
||||
export function bgBrightMagenta(str) {
|
||||
return run(str, code([105], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to bright cyan.
|
||||
* @param str text to make its background bright-cyan
|
||||
*/
|
||||
export function bgBrightCyan(str) {
|
||||
return run(str, code([106], 49));
|
||||
}
|
||||
/**
|
||||
* Set background color to bright white.
|
||||
* @param str text to make its background bright-white
|
||||
*/
|
||||
export function bgBrightWhite(str) {
|
||||
return run(str, code([107], 49));
|
||||
}
|
||||
/* Special Color Sequences */
|
||||
/**
 * Clamp a color component into [min, max] and truncate it to an integer.
 * @param n value to clamp
 * @param max inclusive upper bound (default 255)
 * @param min inclusive lower bound (default 0)
 */
function clampAndTruncate(n, max = 255, min = 0) {
  const clamped = Math.max(Math.min(n, max), min);
  return Math.trunc(clamped);
}
/**
 * Set text color using paletted 8bit colors.
 * https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit
 * @param str text to apply the paletted 8bit color to
 * @param color palette index (clamped to 0-255)
 */
export function rgb8(str, color) {
  return run(str, code([38, 5, clampAndTruncate(color)], 39));
}
/**
 * Set background color using paletted 8bit colors.
 * https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit
 * @param str text to apply the paletted 8bit background color to
 * @param color palette index (clamped to 0-255)
 */
export function bgRgb8(str, color) {
  return run(str, code([48, 5, clampAndTruncate(color)], 49));
}
|
||||
/**
 * Set text color using 24bit rgb.
 * `color` can be a number in range `0x000000` to `0xffffff` or
 * an `Rgb` object.
 *
 * To produce the color magenta:
 *
 *      rgb24("foo", 0xff00ff);
 *      rgb24("foo", {r: 255, g: 0, b: 255});
 * @param str text to apply 24bit rgb to
 * @param color packed 0xRRGGBB number or {r, g, b} object
 */
export function rgb24(str, color) {
  if (typeof color === "number") {
    // Unpack the 0xRRGGBB integer into its three channels.
    const r = (color >> 16) & 0xff;
    const g = (color >> 8) & 0xff;
    const b = color & 0xff;
    return run(str, code([38, 2, r, g, b], 39));
  }
  // Object form: each channel is clamped into the valid 0-255 range.
  return run(
    str,
    code(
      [38, 2, clampAndTruncate(color.r), clampAndTruncate(color.g), clampAndTruncate(color.b)],
      39,
    ),
  );
}
/**
 * Set background color using 24bit rgb.
 * `color` can be a number in range `0x000000` to `0xffffff` or
 * an `Rgb` object.
 *
 * To produce the color magenta:
 *
 *      bgRgb24("foo", 0xff00ff);
 *      bgRgb24("foo", {r: 255, g: 0, b: 255});
 * @param str text to apply the 24bit rgb background to
 * @param color packed 0xRRGGBB number or {r, g, b} object
 */
export function bgRgb24(str, color) {
  if (typeof color === "number") {
    const r = (color >> 16) & 0xff;
    const g = (color >> 8) & 0xff;
    const b = color & 0xff;
    return run(str, code([48, 2, r, g, b], 49));
  }
  return run(
    str,
    code(
      [48, 2, clampAndTruncate(color.r), clampAndTruncate(color.g), clampAndTruncate(color.b)],
      49,
    ),
  );
}
|
||||
// Matches ANSI/CSI escape sequences; adapted from chalk's ansi-regex:
// https://github.com/chalk/ansi-regex/blob/2b56fb0c7a07108e5b54241e8faec160d393aedb/index.js
const ANSI_PATTERN = new RegExp(
  [
    "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)",
    "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))",
  ].join("|"),
  "g",
);
/**
 * Remove ANSI escape codes from the string.
 * @param string text to strip ANSI escape codes from
 */
export function stripColor(string) {
  return string.replace(ANSI_PATTERN, "");
}
|
||||
@@ -1,160 +0,0 @@
|
||||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
// This module is browser compatible.
// Myers-style sequence diff, ported from Deno std testing/_diff.
export var DiffType;
(function (DiffType) {
  DiffType["removed"] = "removed";
  DiffType["common"] = "common";
  DiffType["added"] = "added";
})(DiffType || (DiffType = {}));
// Numeric step markers recorded in the routes buffer during the search.
const REMOVED = 1;
const COMMON = 2;
const ADDED = 3;
/**
 * Collect the run of equal elements shared by A and B, starting from the
 * front (or from the back when `reverse` is set).
 */
function createCommon(A, B, reverse) {
  const common = [];
  if (A.length === 0 || B.length === 0) return [];
  for (let i = 0; i < Math.min(A.length, B.length); i += 1) {
    const aIdx = reverse ? A.length - i - 1 : i;
    const bIdx = reverse ? B.length - i - 1 : i;
    if (A[aIdx] === B[bIdx]) {
      common.push(A[aIdx]);
    } else {
      return common;
    }
  }
  return common;
}
/**
 * Renders the differences between the actual and expected values
 * @param A Actual value
 * @param B Expected value
 */
export function diff(A, B) {
  // Peel off the shared prefix and suffix so the core search only sees the middle.
  const prefixCommon = createCommon(A, B);
  const suffixCommon = createCommon(
    A.slice(prefixCommon.length),
    B.slice(prefixCommon.length),
    true,
  ).reverse();
  A = suffixCommon.length
    ? A.slice(prefixCommon.length, -suffixCommon.length)
    : A.slice(prefixCommon.length);
  B = suffixCommon.length
    ? B.slice(prefixCommon.length, -suffixCommon.length)
    : B.slice(prefixCommon.length);
  // The core assumes the first sequence is the longer one; remember if we swapped.
  const swapped = B.length > A.length;
  [A, B] = swapped ? [B, A] : [A, B];
  const M = A.length;
  const N = B.length;
  if (!M && !N && !suffixCommon.length && !prefixCommon.length) return [];
  if (!N) {
    // Degenerate case: all remaining elements of A were added/removed wholesale.
    return [
      ...prefixCommon.map((c) => ({ type: DiffType.common, value: c })),
      ...A.map((a) => ({
        type: swapped ? DiffType.added : DiffType.removed,
        value: a,
      })),
      ...suffixCommon.map((c) => ({ type: DiffType.common, value: c })),
    ];
  }
  const offset = N;
  const delta = M - N;
  const size = M + N + 1;
  const fp = new Array(size).fill({ y: -1 });
  /**
   * INFO:
   * This buffer is used to save memory and improve performance.
   * The first half is used to save route and last half is used to save diff
   * type.
   * This is because, when I kept new uint8array area to save type,performance
   * worsened.
   */
  const routes = new Uint32Array((M * N + size + 1) * 2);
  const diffTypesPtrOffset = routes.length / 2;
  let ptr = 0;
  let p = -1;
  // Walk the recorded routes backwards from `current`, emitting diff entries.
  function backTrace(A, B, current, swapped) {
    const M = A.length;
    const N = B.length;
    const result = [];
    let a = M - 1;
    let b = N - 1;
    let j = routes[current.id];
    let type = routes[current.id + diffTypesPtrOffset];
    while (true) {
      if (!j && !type) break;
      const prev = j;
      if (type === REMOVED) {
        result.unshift({
          type: swapped ? DiffType.removed : DiffType.added,
          value: B[b],
        });
        b -= 1;
      } else if (type === ADDED) {
        result.unshift({
          type: swapped ? DiffType.added : DiffType.removed,
          value: A[a],
        });
        a -= 1;
      } else {
        result.unshift({ type: DiffType.common, value: A[a] });
        a -= 1;
        b -= 1;
      }
      j = routes[prev];
      type = routes[prev + diffTypesPtrOffset];
    }
    return result;
  }
  // Choose the better neighbouring furthest point and record the step taken.
  function createFP(slide, down, k, M) {
    if (slide && slide.y === -1 && down && down.y === -1) {
      return { y: 0, id: 0 };
    }
    if (
      (down && down.y === -1) ||
      k === M ||
      (slide && slide.y) > (down && down.y) + 1
    ) {
      const prev = slide.id;
      ptr++;
      routes[ptr] = prev;
      routes[ptr + diffTypesPtrOffset] = ADDED;
      return { y: slide.y, id: ptr };
    } else {
      const prev = down.id;
      ptr++;
      routes[ptr] = prev;
      routes[ptr + diffTypesPtrOffset] = REMOVED;
      return { y: down.y + 1, id: ptr };
    }
  }
  // Advance along diagonal k as long as elements keep matching.
  function snake(k, slide, down, _offset, A, B) {
    const M = A.length;
    const N = B.length;
    if (k < -N || M < k) return { y: -1, id: -1 };
    const fp = createFP(slide, down, k, M);
    while (fp.y + k < M && fp.y < N && A[fp.y + k] === B[fp.y]) {
      const prev = fp.id;
      ptr++;
      fp.id = ptr;
      fp.y += 1;
      routes[ptr] = prev;
      routes[ptr + diffTypesPtrOffset] = COMMON;
    }
    return fp;
  }
  // Expand the search band until the furthest point on the delta diagonal reaches N.
  while (fp[delta + offset].y < N) {
    p = p + 1;
    for (let k = -p; k < delta; ++k) {
      fp[k + offset] = snake(k, fp[k - 1 + offset], fp[k + 1 + offset], offset, A, B);
    }
    for (let k = delta + p; k > delta; --k) {
      fp[k + offset] = snake(k, fp[k - 1 + offset], fp[k + 1 + offset], offset, A, B);
    }
    fp[delta + offset] = snake(delta, fp[delta - 1 + offset], fp[delta + 1 + offset], offset, A, B);
  }
  return [
    ...prefixCommon.map((c) => ({ type: DiffType.common, value: c })),
    ...backTrace(A, B, fp[delta + offset], swapped),
    ...suffixCommon.map((c) => ({ type: DiffType.common, value: c })),
  ];
}
|
||||
@@ -1,439 +0,0 @@
|
||||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
// This module is browser compatible. Do not rely on good formatting of values
|
||||
// for AssertionError messages in browsers.
|
||||
import * as dntShim from "../../../../_dnt.test_shims.js";
|
||||
import { bold, gray, green, red, stripColor, white } from "../fmt/colors.js";
|
||||
import { diff, DiffType } from "./_diff.js";
|
||||
// Placeholder used when a value cannot be rendered for a failure message.
const CAN_NOT_DISPLAY = "[Cannot display]";
/** Error type thrown by every assertion helper in this module. */
export class AssertionError extends Error {
  constructor(message) {
    super(message);
    this.name = "AssertionError";
  }
}
/**
 * Converts the input into a string. Objects, Sets and Maps are sorted so as to
 * make tests less flaky
 * @param v Value to be formatted
 */
export function _format(v) {
  // Under Deno, use its inspector for stable, fully-expanded, sorted output;
  // otherwise fall back to String() with quotes/backslashes escaped.
  if (dntShim.dntGlobalThis.Deno) {
    return dntShim.Deno.inspect(v, {
      depth: Infinity,
      sorted: true,
      trailingComma: true,
      compact: false,
      iterableLimit: Infinity,
    });
  }
  return `"${String(v).replace(/(?=["\\])/g, "\\")}"`;
}
|
||||
/**
 * Colors the output of assertion diffs
 * @param diffType Difference type, either added or removed
 */
function createColor(diffType) {
  if (diffType === DiffType.added) {
    return (text) => green(bold(text));
  }
  if (diffType === DiffType.removed) {
    return (text) => red(bold(text));
  }
  return white;
}
/**
 * Prefixes `+` or `-` in diff output
 * @param diffType Difference type, either added or removed
 */
function createSign(diffType) {
  if (diffType === DiffType.added) {
    return "+ ";
  }
  if (diffType === DiffType.removed) {
    return "- ";
  }
  return "  ";
}
// Assemble the full diff message: legend header, then one colored line per entry.
function buildMessage(diffResult) {
  const header = ` ${gray(bold("[Diff]"))} ${red(bold("Actual"))} / ${green(bold("Expected"))}`;
  const body = diffResult.map((result) => {
    const colorize = createColor(result.type);
    return colorize(`${createSign(result.type)}${result.value}`);
  });
  return ["", "", header, "", "", ...body, ""];
}
|
||||
// True when `x` looks like a keyed collection (Map/Set): iterable and has `size`.
function isKeyedCollection(x) {
  return [Symbol.iterator, "size"].every((k) => k in x);
}
/**
 * Deep equality comparison used in assertions
 * @param c actual value
 * @param d expected value
 */
export function equal(c, d) {
  const seen = new Map();
  return (function compare(a, b) {
    // RegExp and URL instances compare by their string rendering
    // (unless mistreated as plain objects).
    if (
      a &&
      b &&
      ((a instanceof RegExp && b instanceof RegExp) ||
        (a instanceof URL && b instanceof URL))
    ) {
      return String(a) === String(b);
    }
    if (a instanceof Date && b instanceof Date) {
      const aTime = a.getTime();
      const bTime = b.getTime();
      // Invalid dates have NaN timestamps; NaN !== NaN, so handle explicitly.
      if (Number.isNaN(aTime) && Number.isNaN(bTime)) {
        return true;
      }
      return a.getTime() === b.getTime();
    }
    if (Object.is(a, b)) {
      return true;
    }
    if (a && typeof a === "object" && b && typeof b === "object") {
      // Cycle guard: a pair already being compared is assumed equal.
      if (seen.get(a) === b) {
        return true;
      }
      if (Object.keys(a || {}).length !== Object.keys(b || {}).length) {
        return false;
      }
      if (isKeyedCollection(a) && isKeyedCollection(b)) {
        if (a.size !== b.size) {
          return false;
        }
        let unmatchedEntries = a.size;
        for (const [aKey, aValue] of a.entries()) {
          for (const [bKey, bValue] of b.entries()) {
            /* Given that Map keys can be references, we need
             * to ensure that they are also deeply equal */
            if (
              (aKey === aValue && bKey === bValue && compare(aKey, bKey)) ||
              (compare(aKey, bKey) && compare(aValue, bValue))
            ) {
              unmatchedEntries--;
            }
          }
        }
        return unmatchedEntries === 0;
      }
      // Plain objects: walk the union of own keys and symbols of both sides.
      const merged = { ...a, ...b };
      for (const key of [
        ...Object.getOwnPropertyNames(merged),
        ...Object.getOwnPropertySymbols(merged),
      ]) {
        if (!compare(a && a[key], b && b[key])) {
          return false;
        }
        if ((key in a && !(key in b)) || (key in b && !(key in a))) {
          return false;
        }
      }
      seen.set(a, b);
      return true;
    }
    return false;
  })(c, d);
}
|
||||
/** Make an assertion, error will be thrown if `expr` does not have truthy value. */
export function assert(expr, msg = "") {
  if (!expr) {
    throw new AssertionError(msg);
  }
}
/**
 * Make an assertion that `actual` and `expected` are deeply equal; throws an
 * AssertionError (with a colored diff when one can be built) otherwise.
 */
export function assertEquals(actual, expected, msg) {
  if (equal(actual, expected)) {
    return;
  }
  let message = "";
  const actualString = _format(actual);
  const expectedString = _format(expected);
  try {
    const diffResult = diff(actualString.split("\n"), expectedString.split("\n"));
    const diffMsg = buildMessage(diffResult).join("\n");
    message = `Values are not equal:\n${diffMsg}`;
  } catch {
    // Diffing itself can fail on exotic values; fall back to a placeholder.
    message = `\n${red(CAN_NOT_DISPLAY)} + \n\n`;
  }
  // An explicit caller-supplied message always wins over the generated one.
  if (msg) {
    message = msg;
  }
  throw new AssertionError(message);
}
|
||||
/** Assert that `actual` and `expected` are NOT deeply equal. */
export function assertNotEquals(actual, expected, msg) {
  if (!equal(actual, expected)) {
    return;
  }
  // String() may throw (e.g. revoked proxies); degrade to a placeholder.
  let actualString;
  try {
    actualString = String(actual);
  } catch {
    actualString = "[Cannot display]";
  }
  let expectedString;
  try {
    expectedString = String(expected);
  } catch {
    expectedString = "[Cannot display]";
  }
  if (!msg) {
    msg = `actual: ${actualString} expected: ${expectedString}`;
  }
  throw new AssertionError(msg);
}
/** Assert that `actual` and `expected` are strictly (`===`) equal. */
export function assertStrictEquals(actual, expected, msg) {
  if (actual === expected) {
    return;
  }
  let message;
  if (msg) {
    message = msg;
  } else {
    const actualString = _format(actual);
    const expectedString = _format(expected);
    if (actualString === expectedString) {
      // Identical rendering but different references: show the structure indented.
      const withOffset = actualString
        .split("\n")
        .map((l) => ` ${l}`)
        .join("\n");
      message = `Values have the same structure but are not reference-equal:\n\n${red(withOffset)}\n`;
    } else {
      try {
        const diffResult = diff(actualString.split("\n"), expectedString.split("\n"));
        const diffMsg = buildMessage(diffResult).join("\n");
        message = `Values are not strictly equal:\n${diffMsg}`;
      } catch {
        message = `\n${red(CAN_NOT_DISPLAY)} + \n\n`;
      }
    }
  }
  throw new AssertionError(message);
}
/** Assert that `actual` and `expected` are NOT strictly (`===`) equal. */
export function assertNotStrictEquals(actual, expected, msg) {
  if (actual !== expected) {
    return;
  }
  throw new AssertionError(
    msg ?? `Expected "actual" to be strictly unequal to: ${_format(actual)}\n`,
  );
}
|
||||
/**
 * Make an assertion that actual is not null or undefined. If not
 * then thrown.
 */
export function assertExists(actual, msg) {
  if (actual !== undefined && actual !== null) {
    return;
  }
  if (!msg) {
    msg = `actual: "${actual}" expected to match anything but null or undefined`;
  }
  throw new AssertionError(msg);
}
/**
 * Make an assertion that actual includes expected. If not
 * then thrown.
 */
export function assertStringIncludes(actual, expected, msg) {
  if (actual.includes(expected)) {
    return;
  }
  if (!msg) {
    msg = `actual: "${actual}" expected to contain: "${expected}"`;
  }
  throw new AssertionError(msg);
}
/**
 * Assert that every element of `expected` is deeply contained in `actual`;
 * the failure message lists the elements that were missing.
 */
export function assertArrayIncludes(actual, expected, msg) {
  // An expected element is "missing" when no actual element deep-equals it.
  const missing = expected.filter(
    (item) => !actual.some((candidate) => equal(item, candidate)),
  );
  if (missing.length === 0) {
    return;
  }
  if (!msg) {
    msg = `actual: "${_format(actual)}" expected to include: "${_format(expected)}"\nmissing: ${_format(missing)}`;
  }
  throw new AssertionError(msg);
}
|
||||
/**
 * Make an assertion that `actual` match RegExp `expected`. If not
 * then thrown
 */
export function assertMatch(actual, expected, msg) {
  if (expected.test(actual)) {
    return;
  }
  if (!msg) {
    msg = `actual: "${actual}" expected to match: "${expected}"`;
  }
  throw new AssertionError(msg);
}
/**
 * Make an assertion that `actual` not match RegExp `expected`. If match
 * then thrown
 */
export function assertNotMatch(actual, expected, msg) {
  if (!expected.test(actual)) {
    return;
  }
  if (!msg) {
    msg = `actual: "${actual}" expected to not match: "${expected}"`;
  }
  throw new AssertionError(msg);
}
|
||||
/**
 * Make an assertion that `actual` object is a subset of `expected` object, deeply.
 * If not, then throw.
 */
export function assertObjectMatch(
  // deno-lint-ignore no-explicit-any
  actual, expected) {
  const seen = new WeakMap();
  // Project `actual` down to only the keys present in `expected`, recursively,
  // then let assertEquals compare the projection against `expected`.
  return assertEquals(
    (function filter(a, b) {
      // Prevent infinite loop with circular references with same filter
      if (seen.has(a) && seen.get(a) === b) {
        return a;
      }
      seen.set(a, b);
      // Filter keys and symbols which are present in both actual and expected
      const filtered = {};
      const entries = [
        ...Object.getOwnPropertyNames(a),
        ...Object.getOwnPropertySymbols(a),
      ]
        .filter((key) => key in b)
        .map((key) => [key, a[key]]);
      for (const [key, value] of entries) {
        if (Array.isArray(value)) {
          // Arrays: compare only up to the subset's length, filtering nested objects.
          const subset = b[key];
          if (Array.isArray(subset)) {
            filtered[key] = value
              .slice(0, subset.length)
              .map((element, index) => {
                const subsetElement = subset[index];
                if (typeof subsetElement === "object" && subsetElement) {
                  return filter(element, subsetElement);
                }
                return element;
              });
            continue;
          }
        } else if (typeof value === "object") {
          // Nested object references: build a filtered object recursively.
          const subset = b[key];
          if (typeof subset === "object" && subset) {
            filtered[key] = filter(value, subset);
            continue;
          }
        }
        filtered[key] = value;
      }
      return filtered;
    })(actual, expected),
    expected,
  );
}
|
||||
/**
 * Forcefully throws a failed assertion
 */
export function fail(msg) {
  // eslint-disable-next-line @typescript-eslint/no-use-before-define
  assert(false, `Failed assertion${msg ? `: ${msg}` : "."}`);
}
/**
 * Executes a function, expecting it to throw. If it does not, then it
 * throws. An error class and a string that should be included in the
 * error message can also be asserted.
 */
export function assertThrows(fn, ErrorClass, msgIncludes = "", msg) {
  let threw = false;
  let caught = null;
  try {
    fn();
  } catch (e) {
    if (e instanceof Error === false) {
      throw new AssertionError("A non-Error object was thrown.");
    }
    if (ErrorClass && !(e instanceof ErrorClass)) {
      msg = `Expected error to be instance of "${ErrorClass.name}", but was "${e.constructor.name}"${msg ? `: ${msg}` : "."}`;
      throw new AssertionError(msg);
    }
    // Compare stripped of ANSI codes so colored messages still match.
    if (msgIncludes && !stripColor(e.message).includes(stripColor(msgIncludes))) {
      msg = `Expected error message to include "${msgIncludes}", but got "${e.message}"${msg ? `: ${msg}` : "."}`;
      throw new AssertionError(msg);
    }
    threw = true;
    caught = e;
  }
  if (!threw) {
    msg = `Expected function to throw${msg ? `: ${msg}` : "."}`;
    throw new AssertionError(msg);
  }
  return caught;
}
|
||||
/**
 * Executes a function which returns a promise, expecting it to throw or reject.
 * If it does not, then it throws. An error class and a string that should be
 * included in the error message can also be asserted.
 */
export async function assertThrowsAsync(fn, ErrorClass, msgIncludes = "", msg) {
  let threw = false;
  let caught = null;
  try {
    await fn();
  } catch (e) {
    if (e instanceof Error === false) {
      throw new AssertionError("A non-Error object was thrown or rejected.");
    }
    if (ErrorClass && !(e instanceof ErrorClass)) {
      msg = `Expected error to be instance of "${ErrorClass.name}", but got "${e.name}"${msg ? `: ${msg}` : "."}`;
      throw new AssertionError(msg);
    }
    // Compare stripped of ANSI codes so colored messages still match.
    if (msgIncludes && !stripColor(e.message).includes(stripColor(msgIncludes))) {
      msg = `Expected error message to include "${msgIncludes}", but got "${e.message}"${msg ? `: ${msg}` : "."}`;
      throw new AssertionError(msg);
    }
    threw = true;
    caught = e;
  }
  if (!threw) {
    msg = `Expected function to throw${msg ? `: ${msg}` : "."}`;
    throw new AssertionError(msg);
  }
  return caught;
}
/** Use this to stub out methods that will throw when invoked. */
export function unimplemented(msg) {
  throw new AssertionError(msg || "unimplemented");
}
/** Use this to assert unreachable code. */
export function unreachable() {
  throw new AssertionError("unreachable");
}
|
||||
@@ -1,63 +0,0 @@
|
||||
import * as builtInMatchers from "./matchers.js";
|
||||
import { AssertionError } from "../../std@0.97.0/testing/asserts.js";
|
||||
// Matcher registry: seeded with the built-in matchers, extensible at runtime
// via addMatchers().
const matchers = {
  ...builtInMatchers,
};
/**
 * Jest-style expect(): returns a Proxy that resolves `.not`/`.resolves`/
 * `.rejects` modifiers and matcher calls lazily on property access.
 */
export function expect(value) {
  let isNot = false;
  let isPromised = false;
  const self = new Proxy({}, {
    get(_, name) {
      if (name === "not") {
        isNot = !isNot;
        return self;
      }
      if (name === "resolves") {
        if (!(value instanceof Promise)) {
          throw new AssertionError("expected value must be a Promise");
        }
        isPromised = true;
        return self;
      }
      if (name === "rejects") {
        if (!(value instanceof Promise)) {
          throw new AssertionError("expected value must be a Promise");
        }
        // Invert the promise: rejection becomes the value under test,
        // resolution becomes an assertion failure.
        value = value.then((value) => {
          throw new AssertionError(`Promise did not reject. resolved to ${value}`);
        }, (err) => err);
        isPromised = true;
        return self;
      }
      const matcher = matchers[name];
      if (!matcher) {
        throw new TypeError(
          typeof name === "string" ? `matcher not found: ${name}` : "matcher not found",
        );
      }
      return (...args) => {
        function applyMatcher(value, args) {
          const result = matcher(value, ...args);
          if (isNot) {
            if (result.pass) {
              throw new AssertionError("should not " + result.message);
            }
          } else if (!result.pass) {
            throw new AssertionError(result.message || "Unknown error");
          }
        }
        return isPromised
          ? value.then((value) => applyMatcher(value, args))
          : applyMatcher(value, args);
      };
    },
  });
  return self;
}
/** Register additional matchers; later registrations override on name clash. */
export function addMatchers(newMatchers) {
  Object.assign(matchers, newMatchers);
}
|
||||
@@ -1,351 +0,0 @@
|
||||
import * as dntShim from "../../../../_dnt.test_shims.js";
|
||||
import { equal } from '../../std@0.97.0/testing/asserts.js';
|
||||
import { diff, DiffType } from '../../std@0.97.0/testing/_diff.js';
|
||||
import { bold, green, red, white } from '../../std@0.97.0/fmt/colors.js';
|
||||
import * as mock from './mock.js';
|
||||
// Pre-colored labels reused in every matcher failure message.
const ACTUAL = red(bold('actual'));
const EXPECTED = green(bold('expected'));
const CAN_NOT_DISPLAY = '[Cannot display]';
// Render a value for display; fall back to a red placeholder when inspect throws.
function createStr(v) {
  try {
    return dntShim.Deno.inspect(v);
  } catch (e) {
    return red(CAN_NOT_DISPLAY);
  }
}
|
||||
// Map a diff entry type to the color used when printing it.
function createColor(diffType) {
  if (diffType === DiffType.added) {
    return (s) => green(bold(s));
  }
  if (diffType === DiffType.removed) {
    return (s) => red(bold(s));
  }
  return white;
}
// Map a diff entry type to its `+`/`-` line prefix.
function createSign(diffType) {
  if (diffType === DiffType.added) {
    return '+ ';
  }
  if (diffType === DiffType.removed) {
    return '- ';
  }
  return '  ';
}
// Render a diff result as one colored, sign-prefixed line per entry.
function buildMessage(diffResult) {
  return diffResult
    .map((result) => {
      const c = createColor(result.type);
      return c(`${createSign(result.type)}${result.value}`);
    })
    .join('\n');
}
// Build a line-by-line diff between the rendered actual and expected values.
function buildDiffMessage(actual, expected) {
  const actualString = createStr(actual);
  const expectedString = createStr(expected);
  try {
    const diffResult = diff(actualString.split('\n'), expectedString.split('\n'));
    return buildMessage(diffResult);
  } catch (e) {
    return `\n${red(CAN_NOT_DISPLAY)} + \n\n`;
  }
}
// Shape a failing matcher result.
function buildFail(message) {
  return {
    pass: false,
    message
  };
}
|
||||
/** Passes when actual and expected are reference-equal (`===`). */
export function toBe(actual, expected) {
  if (actual === expected) return { pass: true };
  return buildFail(`expect(${ACTUAL}).toBe(${EXPECTED})\n\n${buildDiffMessage(actual, expected)}`);
}
/** Passes when actual and expected are deeply equal. */
export function toEqual(actual, expected) {
  if (equal(actual, expected)) return { pass: true };
  return buildFail(`expect(${ACTUAL}).toEqual(${EXPECTED})\n\n${buildDiffMessage(actual, expected)}`);
}
/** Passes when actual > comparison. */
export function toBeGreaterThan(actual, comparison) {
  if (actual > comparison) return { pass: true };
  const actualText = createStr(actual);
  const comparisonText = createStr(comparison);
  return buildFail(`expect(${ACTUAL}).toBeGreaterThan(${EXPECTED})\n\n  ${red(actualText)} is not greater than ${green(comparisonText)}`);
}
/** Passes when actual < comparison. */
export function toBeLessThan(actual, comparison) {
  if (actual < comparison) return { pass: true };
  const actualText = createStr(actual);
  const comparisonText = createStr(comparison);
  return buildFail(`expect(${ACTUAL}).toBeLessThan(${EXPECTED})\n\n  ${red(actualText)} is not less than ${green(comparisonText)}`);
}
/** Passes when actual >= comparison. */
export function toBeGreaterThanOrEqual(actual, comparison) {
  if (actual >= comparison) return { pass: true };
  const actualText = createStr(actual);
  const comparisonText = createStr(comparison);
  return buildFail(`expect(${ACTUAL}).toBeGreaterThanOrEqual(${EXPECTED})\n\n  ${red(actualText)} is not greater than or equal to ${green(comparisonText)}`);
}
/** Passes when actual <= comparison. */
export function toBeLessThanOrEqual(actual, comparison) {
  if (actual <= comparison) return { pass: true };
  const actualText = createStr(actual);
  const comparisonText = createStr(comparison);
  return buildFail(`expect(${ACTUAL}).toBeLessThanOrEqual(${EXPECTED})\n\n  ${red(actualText)} is not less than or equal to ${green(comparisonText)}`);
}
/** Passes when the value is truthy. */
export function toBeTruthy(value) {
  if (value) return { pass: true };
  const actualText = createStr(value);
  return buildFail(`expect(${ACTUAL}).toBeTruthy()

  ${red(actualText)} is not truthy`);
}
/** Passes when the value is falsy. */
export function toBeFalsy(value) {
  if (!value) return { pass: true };
  const actualText = createStr(value);
  return buildFail(`expect(${ACTUAL}).toBeFalsy()\n\n  ${red(actualText)} is not falsy`);
}
/** Passes when the value is not undefined. */
export function toBeDefined(value) {
  if (typeof value !== 'undefined') return { pass: true };
  const actualText = createStr(value);
  return buildFail(`expect(${ACTUAL}).toBeDefined()\n\n  ${red(actualText)} is not defined`);
}
/** Passes when the value is undefined. */
export function toBeUndefined(value) {
  if (typeof value === 'undefined') return { pass: true };
  const actualText = createStr(value);
  return buildFail(`expect(${ACTUAL}).toBeUndefined()\n\n  ${red(actualText)} is defined but should be undefined`);
}
/** Passes when the value is exactly null. */
export function toBeNull(value) {
  if (value === null) return { pass: true };
  const actualText = createStr(value);
  return buildFail(`expect(${ACTUAL}).toBeNull()\n\n  ${red(actualText)} should be null`);
}
/** Passes when the value is a NaN number. */
export function toBeNaN(value) {
  if (typeof value === 'number' && isNaN(value)) return { pass: true };
  const actualText = createStr(value);
  return buildFail(`expect(${ACTUAL}).toBeNaN()\n\n  ${red(actualText)} should be NaN`);
}
/** Passes when the value is an instance of the expected constructor. */
export function toBeInstanceOf(value, expected) {
  if (value instanceof expected) return { pass: true };
  const actualText = createStr(value);
  const expectedString = createStr(expected);
  return buildFail(`expect(${ACTUAL}).toBeInstanceOf(${EXPECTED})\n\n  expected ${green(expected.name)} but received ${red(actualText)}`);
}
/** Passes when the stringified value contains the substring or matches the RegExp. */
export function toMatch(value, pattern) {
  const valueStr = value.toString();
  if (typeof pattern === 'string') {
    if (valueStr.indexOf(pattern) !== -1) return { pass: true };
    const actualText = createStr(value);
    const patternText = createStr(pattern);
    return buildFail(`expect(${ACTUAL}).toMatch(${EXPECTED})\n\n  expected ${red(actualText)} to contain ${green(patternText)}`);
  } else if (pattern instanceof RegExp) {
    if (pattern.exec(valueStr)) return { pass: true };
    const actualText = createStr(value);
    const patternText = createStr(pattern);
    return buildFail(`expect(${ACTUAL}).toMatch(${EXPECTED})\n\n  ${red(actualText)} did not match regex ${green(patternText)}`);
  } else {
    return buildFail('Invalid internal state');
  }
}
/** Passes when the object has a defined property of the given name. */
export function toHaveProperty(value, propName) {
  if (typeof value === 'object' && typeof value[propName] !== 'undefined') {
    return { pass: true };
  }
  const actualText = createStr(value);
  const propNameText = createStr(propName);
  return buildFail(`expect(${ACTUAL}).toHaveProperty(${EXPECTED})\n\n  ${red(actualText)} did not contain property ${green(propNameText)}`);
}
/** Passes when value.length equals the expected length. */
export function toHaveLength(value, length) {
  if (value?.length === length) return { pass: true };
  const actualText = createStr(value.length);
  const lengthText = createStr(length);
  return buildFail(`expect(${ACTUAL}).toHaveLength(${EXPECTED})\n\n  expected array to have length ${green(lengthText)} but was ${red(actualText)}`);
}
|
||||
export function toContain(value, item) {
|
||||
if (value && typeof value.includes === 'function' && value.includes(item)) {
|
||||
return { pass: true };
|
||||
}
|
||||
const actualString = createStr(value);
|
||||
const itemString = createStr(item);
|
||||
if (value && typeof value.includes === 'function') {
|
||||
return buildFail(`expect(${ACTUAL}).toContain(${EXPECTED})\n\n ${red(actualString)} did not contain ${green(itemString)}`);
|
||||
}
|
||||
else {
|
||||
return buildFail(`expect(${ACTUAL}).toContain(${EXPECTED})\n\n expected ${red(actualString)} to have an includes method but it is ${green(itemString)}`);
|
||||
}
|
||||
}
|
||||
export function toThrow(value, error) {
|
||||
let fn;
|
||||
if (typeof value === 'function') {
|
||||
fn = value;
|
||||
try {
|
||||
value = value();
|
||||
}
|
||||
catch (err) {
|
||||
value = err;
|
||||
}
|
||||
}
|
||||
const actualString = createStr(fn);
|
||||
const errorString = createStr(error);
|
||||
if (value instanceof Error) {
|
||||
if (typeof error === 'string') {
|
||||
if (!value.message.includes(error)) {
|
||||
return buildFail(`expect(${ACTUAL}).toThrow(${EXPECTED})\n\nexpected ${red(actualString)} to throw error matching ${green(errorString)} but it threw ${red(value.toString())}`);
|
||||
}
|
||||
}
|
||||
else if (error instanceof RegExp) {
|
||||
if (!value.message.match(error)) {
|
||||
return buildFail(`expect(${ACTUAL}).toThrow(${EXPECTED})\n\nexpected ${red(actualString)} to throw error matching ${green(errorString)} but it threw ${red(value.toString())}`);
|
||||
}
|
||||
}
|
||||
return { pass: true };
|
||||
}
|
||||
else {
|
||||
return buildFail(`expect(${ACTUAL}).toThrow(${EXPECTED})\n\nexpected ${red(actualString)} to throw but it did not`);
|
||||
}
|
||||
}
|
||||
function extractMockCalls(value, name) {
|
||||
if (typeof value !== 'function') {
|
||||
return {
|
||||
calls: null,
|
||||
error: `${name} only works on mock functions. received: ${value}`
|
||||
};
|
||||
}
|
||||
const calls = mock.calls(value);
|
||||
if (calls === null) {
|
||||
return { calls: null, error: `${name} only works on mock functions` };
|
||||
}
|
||||
return { calls };
|
||||
}
|
||||
export function toHaveBeenCalled(value) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveBeenCalled');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
const actualString = createStr(value);
|
||||
if (calls && calls.length !== 0)
|
||||
return { pass: true };
|
||||
return buildFail(`expect(${ACTUAL}).toHaveBeenCalled()\n\n ${red(actualString)} was not called`);
|
||||
}
|
||||
export function toHaveBeenCalledTimes(value, times) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveBeenCalledTimes');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
if (!calls)
|
||||
return buildFail('Invalid internal state');
|
||||
if (calls && calls.length === times)
|
||||
return { pass: true };
|
||||
return buildFail(`expect(${ACTUAL}).toHaveBeenCalledTimes(${EXPECTED})\n\n expected ${times} calls but was called: ${calls.length}`);
|
||||
}
|
||||
export function toHaveBeenCalledWith(value, ...args) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveBeenCalledWith');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
const wasCalledWith = calls && calls.some((c) => equal(c.args, args));
|
||||
if (wasCalledWith)
|
||||
return { pass: true };
|
||||
const argsString = createStr(args);
|
||||
return buildFail(`expect(${ACTUAL}).toHaveBeenCalledWith(${EXPECTED})\n\n function was not called with: ${green(argsString)}`);
|
||||
}
|
||||
export function toHaveBeenLastCalledWith(value, ...args) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveBeenLastCalledWith');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
if (!calls || !calls.length) {
|
||||
return buildFail(`expect(${ACTUAL}).toHaveBeenLastCalledWith(...${EXPECTED})\n\n expect last call args to be ${args} but was not called`);
|
||||
}
|
||||
const lastCall = calls[calls.length - 1];
|
||||
if (equal(lastCall.args, args))
|
||||
return { pass: true };
|
||||
return buildFail(`expect(${ACTUAL}).toHaveBeenLastCalledWith(...${EXPECTED})\n\n expect last call args to be ${args} but was: ${lastCall.args}`);
|
||||
}
|
||||
export function toHaveBeenNthCalledWith(value, nth, ...args) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveBeenNthCalledWith');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
const nthCall = calls && calls[nth - 1];
|
||||
if (nthCall) {
|
||||
if (equal(nthCall.args, args))
|
||||
return { pass: true };
|
||||
return buildFail(`expect(${ACTUAL}).toHaveBeenNthCalledWith(${EXPECTED})\n\n expect ${nth}th call args to be ${args} but was: ${nthCall.args}`);
|
||||
}
|
||||
else {
|
||||
return buildFail(`expect(${ACTUAL}).toHaveBeenNthCalledWith(${EXPECTED})\n\n ${nth}th call was not made.`);
|
||||
}
|
||||
}
|
||||
export function toHaveReturnedWith(value, result) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveReturnedWith');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
const wasReturnedWith = calls && calls.some((c) => c.returns && equal(c.returned, result));
|
||||
if (wasReturnedWith)
|
||||
return { pass: true };
|
||||
return buildFail(`expect(${ACTUAL}).toHaveReturnedWith(${EXPECTED})\n\n function did not return: ${result}`);
|
||||
}
|
||||
export function toHaveReturned(value) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveReturned');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
if (calls && calls.some((c) => c.returns))
|
||||
return { pass: true };
|
||||
// TODO(allain): better messages
|
||||
return buildFail(`expected function to return but it never did`);
|
||||
}
|
||||
// TODO(allain): better messages
|
||||
export function toHaveLastReturnedWith(value, expected) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveLastReturnedWith');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
const lastCall = calls && calls[calls.length - 1];
|
||||
if (!lastCall) {
|
||||
return buildFail('no calls made to function');
|
||||
}
|
||||
if (lastCall.throws) {
|
||||
return buildFail(`last call to function threw: ${lastCall.thrown}`);
|
||||
}
|
||||
if (equal(lastCall.returned, expected))
|
||||
return { pass: true };
|
||||
return buildFail(`expected last call to return ${expected} but returned: ${lastCall.returned}`);
|
||||
}
|
||||
export function toHaveReturnedTimes(value, times) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveReturnedTimes');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
const returnCount = calls && calls.filter((c) => c.returns).length;
|
||||
if (returnCount !== times) {
|
||||
return buildFail(`expected ${times} returned times but returned ${returnCount} times`);
|
||||
}
|
||||
return { pass: true };
|
||||
}
|
||||
export function toHaveNthReturnedWith(value, nth, expected) {
|
||||
const { calls, error } = extractMockCalls(value, 'toHaveNthReturnedWith');
|
||||
if (error)
|
||||
return buildFail(error);
|
||||
const nthCall = calls && calls[nth - 1];
|
||||
if (!nthCall) {
|
||||
return buildFail(`${nth} calls were now made`);
|
||||
}
|
||||
if (nthCall.throws) {
|
||||
return buildFail(`${nth}th call to function threw: ${nthCall.thrown}`);
|
||||
}
|
||||
if (!equal(nthCall.returned, expected)) {
|
||||
return buildFail(`expected ${nth}th call to return ${expected} but returned: ${nthCall.returned}`);
|
||||
}
|
||||
return { pass: true };
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
const MOCK_SYMBOL = Symbol.for("@MOCK");
|
||||
export function fn(...stubs) {
|
||||
const calls = [];
|
||||
const f = (...args) => {
|
||||
const stub = stubs.length === 1
|
||||
? // keep reusing the first
|
||||
stubs[0]
|
||||
: // pick the exact mock for the current call
|
||||
stubs[calls.length];
|
||||
try {
|
||||
const returned = stub ? stub(...args) : undefined;
|
||||
calls.push({
|
||||
args,
|
||||
returned,
|
||||
timestamp: Date.now(),
|
||||
returns: true,
|
||||
throws: false,
|
||||
});
|
||||
return returned;
|
||||
}
|
||||
catch (err) {
|
||||
calls.push({
|
||||
args,
|
||||
timestamp: Date.now(),
|
||||
returns: false,
|
||||
thrown: err,
|
||||
throws: true,
|
||||
});
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
Object.defineProperty(f, MOCK_SYMBOL, {
|
||||
value: { calls },
|
||||
writable: false,
|
||||
});
|
||||
return f;
|
||||
}
|
||||
export function calls(f) {
|
||||
const mockInfo = f[MOCK_SYMBOL];
|
||||
if (!mockInfo)
|
||||
throw new Error("callCount only available on mock functions");
|
||||
return [...mockInfo.calls];
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
import * as dntShim from "../../../../_dnt.test_shims.js";
|
||||
import * as m from "./mock.js";
|
||||
export const mock = m;
|
||||
export * from "./expect.js";
|
||||
/**
 * Register a test case named `name` with body `fn`, delegating to the
 * Deno.test shim that dnt generated for the Node build.
 */
export function it(name, fn) {
  const definition = { name, fn };
  dntShim.Deno.test(definition);
}
|
||||
@@ -1,20 +0,0 @@
|
||||
export * from "./src/matches.js";
|
||||
import matches from "./src/matches.js";
|
||||
export { AnyParser } from "./src/parsers/any-parser.js";
|
||||
export { ArrayParser } from "./src/parsers/array-parser.js";
|
||||
export { BoolParser } from "./src/parsers/bool-parser.js";
|
||||
export { FunctionParser } from "./src/parsers/function-parser.js";
|
||||
export { GuardParser } from "./src/parsers/guard-parser.js";
|
||||
export { NilParser } from "./src/parsers/nill-parser.js";
|
||||
export { NumberParser } from "./src/parsers/number-parser.js";
|
||||
export { ObjectParser } from "./src/parsers/object-parser.js";
|
||||
export { OrParsers } from "./src/parsers/or-parser.js";
|
||||
export { ShapeParser } from "./src/parsers/shape-parser.js";
|
||||
export { StringParser } from "./src/parsers/string-parser.js";
|
||||
export { saferStringify } from "./src/utils.js";
|
||||
export { NamedParser } from "./src/parsers/named.js";
|
||||
export { ArrayOfParser } from "./src/parsers/array-of-parser.js";
|
||||
export { LiteralsParser } from "./src/parsers/literal-parser.js";
|
||||
export { ConcatParsers } from "./src/parsers/concat-parser.js";
|
||||
export { MappedAParser } from "./src/parsers/mapped-parser.js";
|
||||
export default matches;
|
||||
@@ -1,137 +0,0 @@
|
||||
import { any, arrayOf, boolean, deferred, dictionary, every, guard, instanceOf, isArray, isFunction, isNill, literal, literals, natural, number, object, Parser, partial, recursive, regex, shape, some, string, tuple, } from "./parsers/index.js";
|
||||
import { parserName } from "./parsers/named.js";
|
||||
import { unknown } from "./parsers/simple-parsers.js";
|
||||
export { Parser as Validator };
|
||||
class Matched {
|
||||
constructor(value) {
|
||||
Object.defineProperty(this, "value", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: value
|
||||
});
|
||||
Object.defineProperty(this, "when", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: ((..._args) => {
|
||||
// deno-lint-ignore no-explicit-any
|
||||
return this;
|
||||
// deno-lint-ignore no-explicit-any
|
||||
})
|
||||
});
|
||||
Object.defineProperty(this, "unwrap", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: (() => {
|
||||
return this.value;
|
||||
// deno-lint-ignore no-explicit-any
|
||||
})
|
||||
});
|
||||
}
|
||||
defaultTo(_defaultValue) {
|
||||
return this.value;
|
||||
}
|
||||
defaultToLazy(_getValue) {
|
||||
return this.value;
|
||||
}
|
||||
}
|
||||
/**
 * Initial state of a `matches(value)` chain: no `when` clause has accepted
 * yet. `when(...matchers, outcome)` tries the matchers against the wrapped
 * value; on success the chain collapses into a `Matched` carrying the
 * outcome (applied to the parsed value when the outcome is a function).
 */
class MatchMore {
  constructor(a) {
    this.a = a;
    this.when = (...args) => {
      // The outcome is the final argument; everything before it is a matcher.
      const [outcome, ...matchers] = args.reverse();
      // Bare values are treated as literal matchers.
      const parser = matches.some(...matchers.map((matcher) =>
        matcher instanceof Parser ? matcher : literal(matcher)));
      const result = parser.enumParsed(this.a);
      if ("error" in result) {
        // No match yet: stay in the still-matching state.
        return this;
      }
      const { value } = result;
      return new Matched(outcome instanceof Function ? outcome(value) : outcome);
    };
    this.unwrap = () => {
      throw new Error("Expecting that value is matched");
    };
  }
  defaultTo(value) {
    return value;
  }
  defaultToLazy(getValue) {
    return getValue();
  }
}
|
||||
/**
 * Brings the declarative nature of pattern matching / switch expressions from
 * functional languages to this library. Chainable `when` clauses find the
 * first structure that fits, then produce its outcome, with `defaultTo` /
 * `defaultToLazy` as the fallback.
 *
 * Use: matches('a value').when(matches.isNumber, (aNumber) => aNumber + 4).defaultTo('fallback value')
 */
export const matches = Object.assign(function matchesFn(value) {
  return new MatchMore(value);
}, {
  // Parser constructors and combinators exposed as properties so callers can
  // write e.g. `matches.shape({...})` without extra imports.
  array: isArray,
  arrayOf,
  some,
  tuple,
  regex,
  number,
  natural,
  isFunction,
  object,
  string,
  shape,
  partial,
  literal,
  every,
  guard,
  unknown,
  any,
  boolean,
  dictionary,
  literals,
  nill: isNill,
  instanceOf,
  Parse: Parser,
  parserName,
  recursive,
  deferred,
});
// Renamed aliases kept so older import names keep working.
const array = isArray;
const nill = isNill;
const Parse = Parser;
// `some`/`oneOf`/`anyOf` accept when any parser accepts; `allOf`/`every`
// require all parsers to accept.
const oneOf = some;
const anyOf = some;
const allOf = every;
export { allOf, any, anyOf, array, arrayOf, boolean, deferred, dictionary, every, guard, instanceOf, isFunction, literal, literals, natural, nill, number, object, oneOf, Parse, Parser, parserName, partial, recursive, regex, shape, some, string, tuple, unknown, };
export default matches;
|
||||
@@ -1,17 +0,0 @@
|
||||
export class AnyParser {
|
||||
constructor(description = {
|
||||
name: "Any",
|
||||
children: [],
|
||||
extras: [],
|
||||
}) {
|
||||
Object.defineProperty(this, "description", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: description
|
||||
});
|
||||
}
|
||||
parse(a, onParse) {
|
||||
return onParse.parsed(a);
|
||||
}
|
||||
}
|
||||
@@ -1,55 +0,0 @@
|
||||
// deno-lint-ignore-file no-explicit-any
|
||||
import { Parser } from "./index.js";
|
||||
/**
|
||||
* Given an object, we want to make sure the key exists and that the value on
|
||||
* the key matches the parser
|
||||
* Note: This will mutate the value sent through
|
||||
*/
|
||||
export class ArrayOfParser {
|
||||
constructor(parser, description = {
|
||||
name: "ArrayOf",
|
||||
children: [parser],
|
||||
extras: [],
|
||||
}) {
|
||||
Object.defineProperty(this, "parser", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: parser
|
||||
});
|
||||
Object.defineProperty(this, "description", {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: description
|
||||
});
|
||||
}
|
||||
parse(a, onParse) {
|
||||
if (!Array.isArray(a)) {
|
||||
return onParse.invalid({
|
||||
value: a,
|
||||
keys: [],
|
||||
parser: this,
|
||||
});
|
||||
}
|
||||
const values = [...a];
|
||||
for (let index = 0; index < values.length; index++) {
|
||||
const result = this.parser.enumParsed(values[index]);
|
||||
if ("error" in result) {
|
||||
result.error.keys.push("" + index);
|
||||
return onParse.invalid(result.error);
|
||||
}
|
||||
else {
|
||||
values[index] = result.value;
|
||||
}
|
||||
}
|
||||
return onParse.parsed(values);
|
||||
}
|
||||
}
|
||||
/**
 * We would like to validate that all of the array is of the same type
 * @param validator What is the validator for the values in the array
 */
export function arrayOf(validator) {
  const elementwise = new ArrayOfParser(validator);
  return new Parser(elementwise);
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user