feat: worked with Matt to restructure the config, health, and migration modules

This commit is contained in:
BluJ
2023-03-02 13:25:07 -07:00
parent de043ddd9e
commit 19504ef559
38 changed files with 990 additions and 1160 deletions

View File

@@ -1,10 +1,10 @@
TS_FILES := $(shell find ./**/*.ts )
version = $(shell git tag --sort=committerdate | tail -1)
test: $(TS_FILES) utils/test/output.ts
test: $(TS_FILES)
npm test
utils/test/output.ts: utils/test/config.json scripts/oldSpecToBuilder.ts
cat utils/test/config.json | deno run scripts/oldSpecToBuilder.ts "../../mod" |deno fmt - > utils/test/output.ts
# utils/test/output.ts: utils/test/config.json scripts/oldSpecToBuilder.ts
# cat utils/test/config.json | deno run scripts/oldSpecToBuilder.ts "../../mod" |deno fmt - > utils/test/output.ts
bundle: fmt $(TS_FILES) .FORCE node_modules
rm -rf dist || true
@@ -18,5 +18,13 @@ node_modules: package.json
npm install
publish: bundle
npm publish
cp package.json dist/package.json
cp README.md dist/README.md
cp LICENSE dist/LICENSE
cd dist && npm publish
link: bundle
cp package.json dist/package.json
cp README.md dist/README.md
cp LICENSE dist/LICENSE
cd dist && npm link
.FORCE:

View File

@@ -3,4 +3,6 @@ module.exports = {
preset: "ts-jest",
automock: false,
testEnvironment: "node",
rootDir: "./lib/",
modulePathIgnorePatterns: ["./dist/"],
};

View File

@@ -1,5 +1,5 @@
import { ok } from "./util";
import * as T from "./types";
import { ok } from "../util";
import * as T from "../types";
export const DEFAULT_OPTIONS: T.BackupOptions = {
delete: true,

View File

@@ -1,82 +0,0 @@
import { Config } from "../config_builder/config";
import * as matches from "ts-matches";
import * as YAML from "yaml";
import { LegacyExpectedExports as ExpectedExports } from "../types";
import { ConfigSpec } from "../types/config-types";
import { TypeFromProps, typeFromProps } from "../utils/propertiesMatcher";
const { any, string, dictionary } = matches;
const matchConfig = dictionary([string, any]);
/**
* Call with the configuration to get a standard getConfig for the expected exports
* Assumption: start9/config.yaml is where the config will be stored
* Throws: Error if there is no file
* Throws: Error if the config.yaml isn't yaml nor config shape
* @param spec
* @returns
*/
export const getConfig =
<A extends ConfigSpec>(spec: Config<A>): ExpectedExports.getConfig =>
async (effects) => {
const config = await effects
.readFile({
path: "start9/config.yaml",
volumeId: "main",
})
.then((x) => YAML.parse(x))
.then((x) => matchConfig.unsafeCast(x))
.catch((e) => {
effects.info(`Got error ${e} while trying to read the config`);
return undefined;
});
return {
result: {
config,
spec: spec.build(),
},
};
};
/**
* Call with the configuration to get a standard getConfig for the expected exports
* Assumption: start9/config.yaml is where the config will be stored
* Throws: Error if there is no file
* Throws: Error if the config.yaml isn't yaml nor config shape
* @param spec
* @returns A funnction for getConfig and the matcher for the spec sent in
*/
export const getConfigAndMatcher = <Spec extends ConfigSpec>(
spec: Config<Spec>
): [
ExpectedExports.getConfig,
matches.Parser<unknown, TypeFromProps<Spec>>
] => {
const specBuilt: Spec = spec.build();
return [
async (effects) => {
const config = await effects
.readFile({
path: "start9/config.yaml",
volumeId: "main",
})
.then((x) => YAML.parse(x))
.then((x) => matchConfig.unsafeCast(x))
.catch((e) => {
effects.info(`Got error ${e} while trying to read the config`);
return undefined;
});
return {
result: {
config,
spec: specBuilt,
},
};
},
typeFromProps(specBuilt),
];
};

View File

@@ -1,132 +0,0 @@
import { getConfig, setConfig } from "./mod";
import * as T from "../types";
import { LegacyExpectedExports as ExpectedExports } from "../types";
import * as M from "../migrations";
import * as util from "../util";
import { EmVer } from "../emver-lite/mod";
import { ConfigSpec } from "../types/config-types";
import { Config } from "../config_builder/mod";
export interface NoRepeat<version extends string, type extends "up" | "down"> {
version: version;
type: type;
}
/**
* @param fn function making desired modifications to the config
* @param configured whether or not the service should be considered "configured"
* @param noRepeat (optional) supply the version and type of the migration
* @param noFail (optional, default:false) whether or not to fail the migration if fn throws an error
* @returns a migraion function
*/
export function updateConfig<
version extends string,
type extends "up" | "down"
>(
fn: (
config: Record<string, unknown>,
effects: T.Effects
) => ConfigSpec | Promise<ConfigSpec>,
configured: boolean,
noRepeat?: NoRepeat<version, type>,
noFail = false
): M.MigrationFn<version, type> {
return M.migrationFn(async (effects: T.Effects) => {
await noRepeatGuard(effects, noRepeat, async () => {
let config = util.unwrapResultType(
await getConfig(Config.of({}))(effects)
).config;
if (config) {
try {
config = await fn(config, effects);
} catch (e) {
if (!noFail) {
throw e;
} else {
configured = false;
}
}
util.unwrapResultType(await setConfig(effects, config));
}
});
return { configured };
});
}
export async function noRepeatGuard<
version extends string,
type extends "up" | "down"
>(
effects: T.Effects,
noRepeat: NoRepeat<version, type> | undefined,
fn: () => Promise<void>
): Promise<void> {
if (!noRepeat) {
return fn();
}
if (
!(await util.exists(effects, {
path: "start9/migrations",
volumeId: "main",
}))
) {
await effects.createDir({ path: "start9/migrations", volumeId: "main" });
}
const migrationPath = {
path: `start9/migrations/${noRepeat.version}.complete`,
volumeId: "main",
};
if (noRepeat.type === "up") {
if (!(await util.exists(effects, migrationPath))) {
await fn();
await effects.writeFile({ ...migrationPath, toWrite: "" });
}
} else if (noRepeat.type === "down") {
if (await util.exists(effects, migrationPath)) {
await fn();
await effects.removeFile(migrationPath);
}
}
}
export async function initNoRepeat<versions extends string>(
effects: T.Effects,
migrations: M.MigrationMapping<versions>,
startingVersion: string
) {
if (
!(await util.exists(effects, {
path: "start9/migrations",
volumeId: "main",
}))
) {
const starting = EmVer.parse(startingVersion);
await effects.createDir({ path: "start9/migrations", volumeId: "main" });
for (const version in migrations) {
const migrationVersion = EmVer.parse(version);
if (migrationVersion.lessThanOrEqual(starting)) {
await effects.writeFile({
path: `start9/migrations/${version}.complete`,
volumeId: "main",
toWrite: "",
});
}
}
}
}
export function fromMapping<versions extends string>(
migrations: M.MigrationMapping<versions>,
currentVersion: string
): ExpectedExports.migration {
const inner = M.fromMapping(migrations, currentVersion);
return async (effects: T.Effects, version: string, direction?: unknown) => {
await initNoRepeat(
effects,
migrations,
direction === "from" ? version : currentVersion
);
return inner(effects, version, direction);
};
}

View File

@@ -1,4 +0,0 @@
export { noPropertiesFound, properties, propertiesv2 } from "./properties";
export { setConfig } from "./setConfig";
export { getConfig, getConfigAndMatcher } from "./getConfig";
export * as migrations from "./migrations";

View File

@@ -1,73 +0,0 @@
import * as YAML from "yaml";
import { exists } from "../util";
import {
Effects,
ExpectedExports,
LegacyExpectedExports,
Properties,
ResultType,
} from "../types";
// deno-lint-ignore no-explicit-any
const asResult = (result: any) => ({ result: result as Properties });
export const noPropertiesFound: ResultType<Properties> = {
result: {
version: 2,
data: {
"Not Ready": {
type: "string",
value: "Could not find properties. The service might still be starting",
qr: false,
copyable: false,
masked: false,
description: "Fallback Message When Properties could not be found",
},
},
},
} as const;
/**
* Default will pull from a file (start9/stats.yaml) expected to be made on the main volume
* Assumption: start9/stats.yaml is created by some process
* Throws: stats.yaml isn't yaml
* @param effects
* @returns
*/
export const properties: LegacyExpectedExports.properties = async (
effects: Effects
) => {
if (
(await exists(effects, { path: "start9/stats.yaml", volumeId: "main" })) ===
false
) {
return noPropertiesFound;
}
return await effects
.readFile({
path: "start9/stats.yaml",
volumeId: "main",
})
.then(YAML.parse)
.then(asResult);
};
/**
* Default will pull from a file (start9/stats.yaml) expected to be made on the main volume
* Assumption: start9/stats.yaml is created by some process
* Throws: stats.yaml isn't yaml
* @param effects
* @returns
*/
export const propertiesv2: ExpectedExports.properties = async ({ effects }) => {
if (
(await exists(effects, { path: "start9/stats.yaml", volumeId: "main" })) ===
false
) {
return noPropertiesFound;
}
return await effects
.readFile({
path: "start9/stats.yaml",
volumeId: "main",
})
.then(YAML.parse)
.then(asResult);
};

View File

@@ -1,38 +0,0 @@
import * as YAML from "yaml";
import {
DependsOn,
Effects,
LegacyExpectedExports as ExpectedExports,
} from "../types";
import { okOf } from "../util";
/**
* Will set the config to the default start9/config.yaml
* Assumption: start9/config.yaml is the location of the configuration
* @param effects
* @param newConfig Config to be written to start9/config.yaml
* @param depends_on This would be the depends on for condition depends_on
* @returns
*/
export const setConfig = async (
effects: Effects,
newConfig: Record<string, unknown>,
dependsOn: DependsOn = {}
) => {
await effects.createDir({
path: "start9",
volumeId: "main",
});
await effects.writeFile({
path: "start9/config.yaml",
toWrite: YAML.stringify(newConfig),
volumeId: "main",
});
return okOf({
signal: "SIGTERM",
"depends-on": dependsOn,
} as const);
};
const _typeConversionCheck: ExpectedExports.setConfig = setConfig;

View File

@@ -1,5 +1,5 @@
import { ConfigSpec, ValueSpec } from "../types/config-types";
import { typeFromProps } from "../util";
import { ConfigSpec, ValueSpec } from "../../types/config-types";
import { typeFromProps } from "../../util";
import { BuilderExtract, IBuilder } from "./builder";
import { Value } from "./value";

View File

@@ -8,7 +8,7 @@ import {
UniqueBy,
ValueSpecList,
ValueSpecListOf,
} from "../types/config-types";
} from "../../types/config-types";
/**
* Used as a subtype of Value.list

View File

@@ -9,7 +9,7 @@ import {
ValueSpecList,
ValueSpecNumber,
ValueSpecString,
} from "../types/config-types";
} from "../../types/config-types";
export type DefaultString =
| string

View File

@@ -1,6 +1,6 @@
import { ConfigSpec } from "../types/config-types";
import { ConfigSpec } from "../../types/config-types";
import { BuilderExtract, IBuilder } from "./builder";
import { Config } from "./mod";
import { Config } from ".";
/**
* Used in the the Value.enum { @link './value.ts' }

3
lib/config/index.ts Normal file
View File

@@ -0,0 +1,3 @@
export * as configBuilder from "./builder";
export { setupConfigExports } from "./setup_config_export";

View File

@@ -1,17 +1,14 @@
import { Config } from "../config_builder/mod";
import { DependsOn, Effects, ExpectedExports } from "../types";
import { Config } from "./builder";
import { DeepPartial, DependsOn, Effects, ExpectedExports } from "../types";
import { ConfigSpec } from "../types/config-types";
import { okOf } from "../util";
import { TypeFromProps } from "../utils/propertiesMatcher";
import { nullIfEmpty } from "./mod";
import { nullIfEmpty, okOf } from "../util";
import { TypeFromProps } from "../util/propertiesMatcher";
export function setupConfigExports<A extends ConfigSpec>(options: {
spec: Config<A>;
dependsOn: DependsOn;
write(effects: Effects, config: TypeFromProps<A>): Promise<null>;
read(
effects: Effects
): Promise<Record<string | number, never> | TypeFromProps<A>>;
write(effects: Effects, config: TypeFromProps<A>): Promise<void>;
read(effects: Effects): Promise<null | DeepPartial<TypeFromProps<A>>>;
}) {
const validator = options.spec.validator();
return {
@@ -34,3 +31,5 @@ export function setupConfigExports<A extends ConfigSpec>(options: {
}) as ExpectedExports.getConfig,
};
}
export default setupConfigExports;

View File

@@ -1,155 +0,0 @@
import * as matches from "ts-matches";
import * as YAML from "yaml";
import * as TOML from "@iarna/toml";
import * as T from "../types";
import { exists } from "../util";
const previousPath = /(.+?)\/([^/]*)$/;
/**
* Used in the get config and the set config exported functions.
* The idea is that we are going to be reading/ writing to a file, or multiple files. And then we use this tool
* to keep the same path on the read and write, and have methods for helping with structured data.
* And if we are not using a structured data, we can use the raw method which forces the construction of a BiMap
* ```ts
import {configSpec} from './configSpec.ts'
import {matches, T} from '../deps.ts';
const { object, string, number, boolean, arrayOf, array, anyOf, allOf } = matches
const someValidator = object({
data: string
})
const jsonFile = ConfigFile.json({
path: 'data.json',
validator: someValidator,
volume: 'main'
})
const tomlFile = ConfigFile.toml({
path: 'data.toml',
validator: someValidator,
volume: 'main'
})
const rawFile = ConfigFile.raw({
path: 'data.amazingSettings',
volume: 'main'
fromData(dataIn: Data): string {
return `myDatais ///- ${dataIn.data}`
},
toData(rawData: string): Data {
const [,data] = /myDatais \/\/\/- (.*)/.match(rawData)
return {data}
}
})
export const setConfig : T.ExpectedExports.setConfig= async (effects, config) => {
await jsonFile.write({ data: 'here lies data'}, effects)
}
export const getConfig: T.ExpectedExports.getConfig = async (effects, config) => ({
spec: configSpec,
config: nullIfEmpty({
...jsonFile.get(effects)
})
```
*/
export class ConfigFile<A> {
protected constructor(
private options: {
path: string;
volume: string;
writeData(dataIn: A): string;
readData(stringValue: string): A;
}
) {}
async write(data: A, effects: T.Effects) {
let matched;
if ((matched = previousPath.exec(this.options.path))) {
await effects.createDir({
volumeId: this.options.volume,
path: matched[1],
});
}
await effects.writeFile({
path: this.options.path,
volumeId: this.options.volume,
toWrite: this.options.writeData(data),
});
}
async read(effects: T.Effects) {
if (
!(await exists(effects, {
path: this.options.path,
volumeId: this.options.volume,
}))
) {
return null;
}
return this.options.readData(
await effects.readFile({
path: this.options.path,
volumeId: this.options.volume,
})
);
}
static raw<A>(options: {
path: string;
volume: string;
fromData(dataIn: A): string;
toData(rawData: string): A;
}) {
return new ConfigFile<A>({
path: options.path,
volume: options.volume,
writeData: options.fromData,
readData: options.toData,
});
}
static json<A>(options: {
path: string;
volume: string;
validator: matches.Validator<unknown, A>;
}) {
return new ConfigFile<A>({
path: options.path,
volume: options.volume,
writeData(inData) {
return JSON.stringify(inData, null, 2);
},
readData(inString) {
return options.validator.unsafeCast(JSON.parse(inString));
},
});
}
static toml<A extends Record<string, unknown>>(options: {
path: string;
volume: string;
validator: matches.Validator<unknown, A>;
}) {
return new ConfigFile<A>({
path: options.path,
volume: options.volume,
writeData(inData) {
return TOML.stringify(inData as TOML.JsonMap);
},
readData(inString) {
return options.validator.unsafeCast(TOML.parse(inString));
},
});
}
static yaml<A extends Record<string, unknown>>(options: {
path: string;
volume: string;
validator: matches.Validator<unknown, A>;
}) {
return new ConfigFile<A>({
path: options.path,
volume: options.volume,
writeData(inData) {
return YAML.stringify(inData);
},
readData(inString) {
return options.validator.unsafeCast(YAML.parse(inString));
},
});
}
}

View File

@@ -1,14 +0,0 @@
import { ConfigFile } from "./config_file";
/**
* A useful tool when doing a getConfig.
* Look into the config {@link ConfigFile} for an example of the use.
* @param s
* @returns
*/
export function nullIfEmpty(s: Record<string, unknown>) {
return Object.keys(s).length === 0 ? null : s;
}
export { setupConfigExports } from "./setup_config_export";
export { ConfigFile };

View File

@@ -220,7 +220,6 @@ export class Checker {
switch (range.substring(0, 1)) {
case ">": {
console.log("greaterThan");
const emVar = EmVer.parse(range.substring(1));
return new Checker((version) => {
const v = EmVer.from(version);

View File

@@ -0,0 +1,75 @@
import { Types } from "..";
import { object, string } from "ts-matches";
export type HealthCheck = (
effects: Types.Effects,
dateMs: number
) => Promise<HealthResult>;
export type HealthResult =
| { success: string }
| { failure: string }
| { disabled: null }
| { starting: null }
| { loading: string };
const hasMessage = object({ message: string }).test;
function safelyStringify(e: unknown) {
if (hasMessage(e)) return e.message;
if (string.test(e)) return e;
try {
return JSON.stringify(e);
} catch (e) {
return "unknown";
}
}
async function timeoutHealth(
effects: Types.Effects,
timeMs: number
): Promise<HealthResult> {
await effects.sleep(timeMs);
return { failure: "Timed out " };
}
export default function healthRunner(
name: string,
fn: HealthCheck,
{ defaultIntervalS = 60 } = {}
) {
return {
create(effects: Types.Effects, defaultIntervalCreatedS = defaultIntervalS) {
let running: any;
function startFn(intervalS: number, timeoutS: number, delayS: number) {
clearInterval(running);
setTimeout(() => {
running = setInterval(async () => {
const result = await Promise.race([
timeoutHealth(effects, timeoutS * 1000),
fn(effects, 123),
]).catch((e) => {
return { failure: safelyStringify(e) };
});
(effects as any).setHealthStatus({
name,
result,
});
}, intervalS * 1000);
}, delayS * 1000);
}
const self = {
stop() {
clearInterval(running);
return self;
},
start({
intervalS = defaultIntervalCreatedS,
timeoutS = 10,
delayS = 0,
} = {}) {
startFn(intervalS, timeoutS, delayS);
return self;
},
};
return self;
},
};
}

2
lib/health/index.ts Normal file
View File

@@ -0,0 +1,2 @@
export * from "./util";
export { default as healthRunner } from "./healthRunner";

View File

@@ -1,5 +1,5 @@
import { Effects, ResultType } from "./types";
import { error, errorCode, isKnownError, ok } from "./util";
import { Effects, ResultType } from "../types";
import { error, errorCode, isKnownError, ok } from "../util";
export const checkWebUrl: (
url: string
) => (
@@ -11,8 +11,9 @@ export const checkWebUrl: (
if (
// deno-lint-ignore no-cond-assign
(errorValue = guardDurationAboveMinimum({ duration, minimumTime: 5000 }))
)
) {
return errorValue;
}
return await effects
.fetch(url)

View File

@@ -2,9 +2,11 @@ export * as matches from "ts-matches";
export * as TOML from "@iarna/toml";
export * as YAML from "yaml";
export * as Types from "./types";
export * as healthUtil from "./healthUtil";
export * as T from "./types";
export * as healthUtil from "./health/util";
export * as util from "./util";
export * as configBuilder from "./config_builder/mod";
export { Backups } from "./backups";
export * as configBuilder from "./config/builder";
export * as backup from "./backup";
export * as configTypes from "./types/config-types";
export * as configTools from "./config_tools/mod";
export * as config from "./config";
export * as health from "./health";

View File

@@ -1,293 +1,305 @@
// // deno-lint-ignore-file no-explicit-any
// import { readLines } from "https://deno.land/std@0.177.0/io/read_lines";
// import { camelCase } from "https://deno.land/x/case@2.1.1/mod";
import camelCase from "lodash/camelCase";
import * as fs from "fs";
// const list: string[] = [];
// if (!Deno.isatty(Deno.stdin.rid)) {
// for await (const line of readLines(Deno.stdin)) {
// line && list.push(line);
// }
// }
export async function writeConvertedFile(
file: string,
inputData: Promise<any> | any,
options: Parameters<typeof makeFileContent>[1]
) {
await fs.writeFile(file, await makeFileContent(inputData, options), (err) =>
console.error(err)
);
}
// console.log(`
// import {configBuilder} from '${await Deno.args[0]}';
// const {Config, Value, List, Variants} = configBuilder;
// `);
// const data = JSON.parse(list.join("\n"));
export default async function makeFileContent(
inputData: Promise<any> | any,
{ startSdk = "start-sdk" } = {}
) {
const outputLines: string[] = [];
outputLines.push(`
import {Config, Value, List, Variants} from '${startSdk}/config/builder';
`);
const data = await inputData;
// const namedConsts = new Set(["Config", "Value", "List"]);
// const configName = newConst("configSpec", convertConfigSpec(data));
// const configMatcherName = newConst(
// "matchConfigSpec",
// `${configName}.validator()`
// );
// console.log(`export type ConfigSpec = typeof ${configMatcherName}._TYPE;`);
const namedConsts = new Set(["Config", "Value", "List"]);
const configName = newConst("configSpec", convertConfigSpec(data));
const configMatcherName = newConst(
"matchConfigSpec",
`${configName}.validator()`
);
outputLines.push(
`export type ConfigSpec = typeof ${configMatcherName}._TYPE;`
);
// function newConst(key: string, data: string) {
// const variableName = getNextConstName(camelCase(key));
// console.log(`export const ${variableName} = ${data};`);
// return variableName;
// }
// function convertConfigSpec(data: any) {
// let answer = "Config.of({";
// for (const [key, value] of Object.entries(data)) {
// const variableName = newConst(key, convertValueSpec(value));
return outputLines.join("\n");
// answer += `"${key}": ${variableName},`;
// }
// return `${answer}});`;
// }
// function convertValueSpec(value: any): string {
// switch (value.type) {
// case "string": {
// return `Value.string(${JSON.stringify(
// {
// name: value.name || null,
// default: value.default || null,
// description: value.description || null,
// warning: value.warning || null,
// nullable: value.nullable || false,
// masked: value.masked || null,
// placeholder: value.placeholder || null,
// pattern: value.pattern || null,
// "pattern-description": value["pattern-description"] || null,
// textarea: value.textarea || null,
// },
// null,
// 2
// )})`;
// }
// case "number": {
// return `Value.number(${JSON.stringify(
// {
// name: value.name || null,
// default: value.default || null,
// description: value.description || null,
// warning: value.warning || null,
// nullable: value.nullable || false,
// range: value.range || null,
// integral: value.integral || false,
// units: value.units || null,
// placeholder: value.placeholder || null,
// },
// null,
// 2
// )})`;
// }
// case "boolean": {
// return `Value.boolean(${JSON.stringify(
// {
// name: value.name || null,
// default: value.default || false,
// description: value.description || null,
// warning: value.warning || null,
// },
// null,
// 2
// )})`;
// }
// case "enum": {
// return `Value.enum(${JSON.stringify(
// {
// name: value.name || null,
// description: value.description || null,
// warning: value.warning || null,
// default: value.default || null,
// values: value.values || null,
// "value-names": value["value-names"] || null,
// },
// null,
// 2
// )})`;
// }
// case "object": {
// const specName = newConst(
// value.name + "_spec",
// convertConfigSpec(value.spec)
// );
// return `Value.object({
// name: ${JSON.stringify(value.name || null)},
// description: ${JSON.stringify(value.description || null)},
// warning: ${JSON.stringify(value.warning || null)},
// default: ${JSON.stringify(value.default || null)},
// "display-as": ${JSON.stringify(value["display-as"] || null)},
// "unique-by": ${JSON.stringify(value["unique-by"] || null)},
// spec: ${specName},
// "value-names": ${JSON.stringify(value["value-names"] || {})},
// })`;
// }
// case "union": {
// const variants = newConst(
// value.name + "_variants",
// convertVariants(value.variants)
// );
// return `Value.union({
// name: ${JSON.stringify(value.name || null)},
// description: ${JSON.stringify(value.description || null)},
// warning: ${JSON.stringify(value.warning || null)},
// default: ${JSON.stringify(value.default || null)},
// variants: ${variants},
// tag: ${JSON.stringify({
// id: value?.tag?.["id"] || null,
// name: value?.tag?.["name"] || null,
// description: value?.tag?.["description"] || null,
// warning: value?.tag?.["warning"] || null,
// "variant-names": value?.tag?.["variant-names"] || {},
// })},
// "display-as": ${JSON.stringify(value["display-as"] || null)},
// "unique-by": ${JSON.stringify(value["unique-by"] || null)},
// "variant-names": ${JSON.stringify(
// (value["variant-names"] as any) || null
// )},
// })`;
// }
// case "list": {
// const list = newConst(value.name + "_list", convertList(value));
// return `Value.list(${list})`;
// }
// case "pointer": {
// return "null as any";
// }
// }
// throw Error(`Unknown type "${value.type}"`);
// }
function newConst(key: string, data: string) {
const variableName = getNextConstName(camelCase(key));
outputLines.push(`export const ${variableName} = ${data};`);
return variableName;
}
function convertConfigSpec(data: any) {
let answer = "Config.of({";
for (const [key, value] of Object.entries(data)) {
const variableName = newConst(key, convertValueSpec(value));
// function convertList(value: any) {
// switch (value.subtype) {
// case "string": {
// return `List.string(${JSON.stringify(
// {
// name: value.name || null,
// range: value.range || null,
// spec: {
// masked: value?.spec?.["masked"] || null,
// placeholder: value?.spec?.["placeholder"] || null,
// pattern: value?.spec?.["pattern"] || null,
// "pattern-description": value?.spec?.["pattern-description"] || null,
// textarea: value?.spec?.["textarea"] || false,
// },
// default: value.default || null,
// description: value.description || null,
// warning: value.warning || null,
// },
// null,
// 2
// )})`;
// }
// case "number": {
// return `List.number(${JSON.stringify(
// {
// name: value.name || null,
// range: value.range || null,
// spec: {
// range: value?.spec?.range || null,
// integral: value?.spec?.integral || false,
// units: value?.spec?.units || null,
// placeholder: value?.spec?.placeholder || null,
// },
// default: value.default || null,
// description: value.description || null,
// warning: value.warning || null,
// },
// null,
// 2
// )})`;
// }
// case "enum": {
// return `List.enum(${JSON.stringify(
// {
// name: value.name || null,
// range: value.range || null,
// spec: {
// values: value?.spec?.["values"] || null,
// "value-names": value?.spec?.["value-names"] || {},
// },
// default: value.default || null,
// description: value.description || null,
// warning: value.warning || null,
// },
// null,
// 2
// )})`;
// }
// case "object": {
// const specName = newConst(
// value.name + "_spec",
// convertConfigSpec(value.spec.spec)
// );
// return `List.obj({
// name: ${JSON.stringify(value.name || null)},
// range: ${JSON.stringify(value.range || null)},
// spec: {
// spec: ${specName},
// "display-as": ${JSON.stringify(
// value?.spec?.["display-as"] || null
// )},
// "unique-by": ${JSON.stringify(value?.spec?.["unique-by"] || null)},
// },
// default: ${JSON.stringify(value.default || null)},
// description: ${JSON.stringify(value.description || null)},
// warning: ${JSON.stringify(value.warning || null)},
// })`;
// }
// case "union": {
// const variants = newConst(
// value.name + "_variants",
// convertConfigSpec(value.spec.variants)
// );
// return `List.union(
// {
// name:${JSON.stringify(value.name || null)},
// range:${JSON.stringify(value.range || null)},
// spec: {
// tag: {
// "id":${JSON.stringify(value?.spec?.tag?.["id"] || null)},
// "name": ${JSON.stringify(
// value?.spec?.tag?.["name"] || null
// )},
// "description": ${JSON.stringify(
// value?.spec?.tag?.["description"] || null
// )},
// "warning": ${JSON.stringify(
// value?.spec?.tag?.["warning"] || null
// )},
// "variant-names": ${JSON.stringify(
// value?.spec?.tag?.["variant-names"] || {}
// )},
// },
// variants: ${variants},
// "display-as": ${JSON.stringify(
// value?.spec?.["display-as"] || null
// )},
// "unique-by": ${JSON.stringify(
// value?.spec?.["unique-by"] || null
// )},
// default: ${JSON.stringify(value?.spec?.["default"] || null)},
// },
// default: ${JSON.stringify(value.default || null)},
// description: ${JSON.stringify(value.description || null)},
// warning: ${JSON.stringify(value.warning || null)},
// }
// )`;
// }
// }
// throw new Error(`Unknown subtype "${value.subtype}"`);
// }
answer += `"${key}": ${variableName},`;
}
return `${answer}});`;
}
function convertValueSpec(value: any): string {
switch (value.type) {
case "string": {
return `Value.string(${JSON.stringify(
{
name: value.name || null,
default: value.default || null,
description: value.description || null,
warning: value.warning || null,
nullable: value.nullable || false,
masked: value.masked || null,
placeholder: value.placeholder || null,
pattern: value.pattern || null,
"pattern-description": value["pattern-description"] || null,
textarea: value.textarea || null,
},
null,
2
)})`;
}
case "number": {
return `Value.number(${JSON.stringify(
{
name: value.name || null,
default: value.default || null,
description: value.description || null,
warning: value.warning || null,
nullable: value.nullable || false,
range: value.range || null,
integral: value.integral || false,
units: value.units || null,
placeholder: value.placeholder || null,
},
null,
2
)})`;
}
case "boolean": {
return `Value.boolean(${JSON.stringify(
{
name: value.name || null,
default: value.default || false,
description: value.description || null,
warning: value.warning || null,
},
null,
2
)})`;
}
case "enum": {
return `Value.enum(${JSON.stringify(
{
name: value.name || null,
description: value.description || null,
warning: value.warning || null,
default: value.default || null,
values: value.values || null,
"value-names": value["value-names"] || null,
},
null,
2
)})`;
}
case "object": {
const specName = newConst(
value.name + "_spec",
convertConfigSpec(value.spec)
);
return `Value.object({
name: ${JSON.stringify(value.name || null)},
description: ${JSON.stringify(value.description || null)},
warning: ${JSON.stringify(value.warning || null)},
default: ${JSON.stringify(value.default || null)},
"display-as": ${JSON.stringify(value["display-as"] || null)},
"unique-by": ${JSON.stringify(value["unique-by"] || null)},
spec: ${specName},
"value-names": ${JSON.stringify(value["value-names"] || {})},
})`;
}
case "union": {
const variants = newConst(
value.name + "_variants",
convertVariants(value.variants)
);
return `Value.union({
name: ${JSON.stringify(value.name || null)},
description: ${JSON.stringify(value.description || null)},
warning: ${JSON.stringify(value.warning || null)},
default: ${JSON.stringify(value.default || null)},
variants: ${variants},
tag: ${JSON.stringify({
id: value?.tag?.["id"] || null,
name: value?.tag?.["name"] || null,
description: value?.tag?.["description"] || null,
warning: value?.tag?.["warning"] || null,
"variant-names": value?.tag?.["variant-names"] || {},
})},
"display-as": ${JSON.stringify(value["display-as"] || null)},
"unique-by": ${JSON.stringify(value["unique-by"] || null)},
"variant-names": ${JSON.stringify(
(value["variant-names"] as any) || null
)},
})`;
}
case "list": {
const list = newConst(value.name + "_list", convertList(value));
return `Value.list(${list})`;
}
case "pointer": {
return "null as any";
}
}
throw Error(`Unknown type "${value.type}"`);
}
// function convertVariants(variants: any) {
// let answer = "Variants.of({";
// for (const [key, value] of Object.entries(variants)) {
// const variableName = newConst(key, convertConfigSpec(value));
// answer += `"${key}": ${variableName},`;
// }
// return `${answer}})`;
// }
function convertList(value: any) {
switch (value.subtype) {
case "string": {
return `List.string(${JSON.stringify(
{
name: value.name || null,
range: value.range || null,
spec: {
masked: value?.spec?.["masked"] || null,
placeholder: value?.spec?.["placeholder"] || null,
pattern: value?.spec?.["pattern"] || null,
"pattern-description":
value?.spec?.["pattern-description"] || null,
textarea: value?.spec?.["textarea"] || false,
},
default: value.default || null,
description: value.description || null,
warning: value.warning || null,
},
null,
2
)})`;
}
case "number": {
return `List.number(${JSON.stringify(
{
name: value.name || null,
range: value.range || null,
spec: {
range: value?.spec?.range || null,
integral: value?.spec?.integral || false,
units: value?.spec?.units || null,
placeholder: value?.spec?.placeholder || null,
},
default: value.default || null,
description: value.description || null,
warning: value.warning || null,
},
null,
2
)})`;
}
case "enum": {
return `List.enum(${JSON.stringify(
{
name: value.name || null,
range: value.range || null,
spec: {
values: value?.spec?.["values"] || null,
"value-names": value?.spec?.["value-names"] || {},
},
default: value.default || null,
description: value.description || null,
warning: value.warning || null,
},
null,
2
)})`;
}
case "object": {
const specName = newConst(
value.name + "_spec",
convertConfigSpec(value.spec.spec)
);
return `List.obj({
name: ${JSON.stringify(value.name || null)},
range: ${JSON.stringify(value.range || null)},
spec: {
spec: ${specName},
"display-as": ${JSON.stringify(
value?.spec?.["display-as"] || null
)},
"unique-by": ${JSON.stringify(value?.spec?.["unique-by"] || null)},
},
default: ${JSON.stringify(value.default || null)},
description: ${JSON.stringify(value.description || null)},
warning: ${JSON.stringify(value.warning || null)},
})`;
}
case "union": {
const variants = newConst(
value.name + "_variants",
convertConfigSpec(value.spec.variants)
);
return `List.union(
{
name:${JSON.stringify(value.name || null)},
range:${JSON.stringify(value.range || null)},
spec: {
tag: {
"id":${JSON.stringify(value?.spec?.tag?.["id"] || null)},
"name": ${JSON.stringify(
value?.spec?.tag?.["name"] || null
)},
"description": ${JSON.stringify(
value?.spec?.tag?.["description"] || null
)},
"warning": ${JSON.stringify(
value?.spec?.tag?.["warning"] || null
)},
"variant-names": ${JSON.stringify(
value?.spec?.tag?.["variant-names"] || {}
)},
},
variants: ${variants},
"display-as": ${JSON.stringify(
value?.spec?.["display-as"] || null
)},
"unique-by": ${JSON.stringify(
value?.spec?.["unique-by"] || null
)},
default: ${JSON.stringify(value?.spec?.["default"] || null)},
},
default: ${JSON.stringify(value.default || null)},
description: ${JSON.stringify(value.description || null)},
warning: ${JSON.stringify(value.warning || null)},
}
)`;
}
}
throw new Error(`Unknown subtype "${value.subtype}"`);
}
// function getNextConstName(name: string, i = 0): string {
// const newName = !i ? name : name + i;
// if (namedConsts.has(newName)) {
// return getNextConstName(name, i + 1);
// }
// namedConsts.add(newName);
// return newName;
// }
function convertVariants(variants: any) {
let answer = "Variants.of({";
for (const [key, value] of Object.entries(variants)) {
const variableName = newConst(key, convertConfigSpec(value));
answer += `"${key}": ${variableName},`;
}
return `${answer}})`;
}
function getNextConstName(name: string, i = 0): string {
const newName = !i ? name : name + i;
if (namedConsts.has(newName)) {
return getNextConstName(name, i + 1);
}
namedConsts.add(newName);
return newName;
}
}

View File

@@ -371,3 +371,7 @@ export type Dependencies = {
): Promise<ResultType<ConfigSpec>>;
};
};
/**
 * Recursively makes every property of an object type optional.
 * Uses `extends object` rather than `extends {}`: `{}` matches any
 * non-nullish value, which would turn e.g. DeepPartial<number> into a
 * mapped type over Number's methods instead of leaving it as `number`.
 */
export type DeepPartial<T> = T extends object
  ? { [P in keyof T]?: DeepPartial<T[P]> }
  : T;

View File

View File

@@ -0,0 +1,363 @@
import { writeConvertedFile } from "../../scripts/oldSpecToBuilder";
import { writeFileSync, readFileSync } from "fs";
writeConvertedFile(
"./lib/util/artifacts/output.ts",
{
rpc: {
type: "object",
name: "RPC Settings",
description: "RPC configuration options.",
spec: {
enable: {
type: "boolean",
name: "Enable",
description: "Allow remote RPC requests.",
default: true,
},
username: {
type: "string",
nullable: false,
name: "Username",
description: "The username for connecting to Bitcoin over RPC.",
default: "bitcoin",
masked: true,
pattern: "^[a-zA-Z0-9_]+$",
"pattern-description":
"Must be alphanumeric (can contain underscore).",
},
password: {
type: "string",
nullable: false,
name: "RPC Password",
description: "The password for connecting to Bitcoin over RPC.",
default: {
charset: "a-z,2-7",
len: 20,
},
pattern: '^[^\\n"]*$',
"pattern-description":
"Must not contain newline or quote characters.",
copyable: true,
masked: true,
},
advanced: {
type: "object",
name: "Advanced",
description: "Advanced RPC Settings",
spec: {
auth: {
name: "Authorization",
description:
"Username and hashed password for JSON-RPC connections. RPC clients connect using the usual http basic authentication.",
type: "list",
subtype: "string",
default: [],
spec: {
pattern:
"^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$",
"pattern-description":
'Each item must be of the form "<USERNAME>:<SALT>$<HASH>".',
masked: false,
},
range: "[0,*)",
},
serialversion: {
name: "Serialization Version",
description:
"Return raw transaction or block hex with Segwit or non-SegWit serialization.",
type: "enum",
values: ["non-segwit", "segwit"],
"value-names": {},
default: "segwit",
},
servertimeout: {
name: "Rpc Server Timeout",
description:
"Number of seconds after which an uncompleted RPC call will time out.",
type: "number",
nullable: false,
range: "[5,300]",
integral: true,
units: "seconds",
default: 30,
},
threads: {
name: "Threads",
description:
"Set the number of threads for handling RPC calls. You may wish to increase this if you are making lots of calls via an integration.",
type: "number",
nullable: false,
default: 16,
range: "[1,64]",
integral: true,
},
workqueue: {
name: "Work Queue",
description:
"Set the depth of the work queue to service RPC calls. Determines how long the backlog of RPC requests can get before it just rejects new ones.",
type: "number",
nullable: false,
default: 128,
range: "[8,256]",
integral: true,
units: "requests",
},
},
},
},
},
"zmq-enabled": {
type: "boolean",
name: "ZeroMQ Enabled",
description: "Enable the ZeroMQ interface",
default: true,
},
txindex: {
type: "boolean",
name: "Transaction Index",
description: "Enable the Transaction Index (txindex)",
default: true,
},
wallet: {
type: "object",
name: "Wallet",
description: "Wallet Settings",
spec: {
enable: {
name: "Enable Wallet",
description: "Load the wallet and enable wallet RPC calls.",
type: "boolean",
default: true,
},
avoidpartialspends: {
name: "Avoid Partial Spends",
description:
"Group outputs by address, selecting all or none, instead of selecting on a per-output basis. This improves privacy at the expense of higher transaction fees.",
type: "boolean",
default: true,
},
discardfee: {
name: "Discard Change Tolerance",
description:
"The fee rate (in BTC/kB) that indicates your tolerance for discarding change by adding it to the fee.",
type: "number",
nullable: false,
default: 0.0001,
range: "[0,.01]",
integral: false,
units: "BTC/kB",
},
},
},
advanced: {
type: "object",
name: "Advanced",
description: "Advanced Settings",
spec: {
mempool: {
type: "object",
name: "Mempool",
description: "Mempool Settings",
spec: {
mempoolfullrbf: {
name: "Enable Full RBF",
description:
"Policy for your node to use for relaying and mining unconfirmed transactions. For details, see https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-24.0.md#notice-of-new-option-for-transaction-replacement-policies",
type: "boolean",
default: false,
},
persistmempool: {
type: "boolean",
name: "Persist Mempool",
description: "Save the mempool on shutdown and load on restart.",
default: true,
},
maxmempool: {
type: "number",
nullable: false,
name: "Max Mempool Size",
description:
"Keep the transaction memory pool below <n> megabytes.",
range: "[1,*)",
integral: true,
units: "MiB",
default: 300,
},
mempoolexpiry: {
type: "number",
nullable: false,
name: "Mempool Expiration",
description:
"Do not keep transactions in the mempool longer than <n> hours.",
range: "[1,*)",
integral: true,
units: "Hr",
default: 336,
},
},
},
peers: {
type: "object",
name: "Peers",
description: "Peer Connection Settings",
spec: {
listen: {
type: "boolean",
name: "Make Public",
description:
"Allow other nodes to find your server on the network.",
default: true,
},
onlyconnect: {
type: "boolean",
name: "Disable Peer Discovery",
description: "Only connect to specified peers.",
default: false,
},
onlyonion: {
type: "boolean",
name: "Disable Clearnet",
description: "Only connect to peers over Tor.",
default: false,
},
addnode: {
name: "Add Nodes",
description: "Add addresses of nodes to connect to.",
type: "list",
subtype: "object",
range: "[0,*)",
default: [],
spec: {
"unique-by": null,
spec: {
hostname: {
type: "string",
nullable: false,
name: "Hostname",
description: "Domain or IP address of bitcoin peer",
pattern:
"(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))",
"pattern-description":
"Must be either a domain name, or an IPv4 or IPv6 address. Do not include protocol scheme (eg 'http://') or port.",
masked: false,
},
port: {
type: "number",
nullable: true,
name: "Port",
description:
"Port that peer is listening on for inbound p2p connections",
range: "[0,65535]",
integral: true,
},
},
},
},
},
},
dbcache: {
type: "number",
nullable: true,
name: "Database Cache",
description:
"How much RAM to allocate for caching the TXO set. Higher values improve syncing performance, but increase your chance of using up all your system's memory or corrupting your database in the event of an ungraceful shutdown. Set this high but comfortably below your system's total RAM during IBD, then turn down to 450 (or leave blank) once the sync completes.",
warning:
"WARNING: Increasing this value results in a higher chance of ungraceful shutdowns, which can leave your node unusable if it happens during the initial block download. Use this setting with caution. Be sure to set this back to the default (450 or leave blank) once your node is synced. DO NOT press the STOP button if your dbcache is large. Instead, set this number back to the default, hit save, and wait for bitcoind to restart on its own.",
range: "(0,*)",
integral: true,
units: "MiB",
},
pruning: {
type: "union",
name: "Pruning Settings",
description:
"Blockchain Pruning Options\nReduce the blockchain size on disk\n",
warning:
"If you set pruning to Manual and your disk is smaller than the total size of the blockchain, you MUST have something running that prunes these blocks or you may overfill your disk!\nDisabling pruning will convert your node into a full archival node. This requires a resync of the entire blockchain, a process that may take several days. Make sure you have enough free disk space or you may fill up your disk.\n",
tag: {
id: "mode",
name: "Pruning Mode",
description:
'- Disabled: Disable pruning\n- Automatic: Limit blockchain size on disk to a certain number of megabytes\n- Manual: Prune blockchain with the "pruneblockchain" RPC\n',
"variant-names": {
disabled: "Disabled",
automatic: "Automatic",
manual: "Manual",
},
},
variants: {
disabled: {},
automatic: {
size: {
type: "number",
nullable: false,
name: "Max Chain Size",
description: "Limit of blockchain size on disk.",
warning:
"Increasing this value will require re-syncing your node.",
default: 550,
range: "[550,1000000)",
integral: true,
units: "MiB",
},
},
manual: {
size: {
type: "number",
nullable: false,
name: "Failsafe Chain Size",
description: "Prune blockchain if size expands beyond this.",
default: 65536,
range: "[550,1000000)",
integral: true,
units: "MiB",
},
},
},
default: "disabled",
},
blockfilters: {
type: "object",
name: "Block Filters",
description: "Settings for storing and serving compact block filters",
spec: {
blockfilterindex: {
type: "boolean",
name: "Compute Compact Block Filters (BIP158)",
description:
"Generate Compact Block Filters during initial sync (IBD) to enable 'getblockfilter' RPC. This is useful if dependent services need block filters to efficiently scan for addresses/transactions etc.",
default: true,
},
peerblockfilters: {
type: "boolean",
name: "Serve Compact Block Filters to Peers (BIP157)",
description:
"Serve Compact Block Filters as a peer service to other nodes on the network. This is useful if you wish to connect an SPV client to your node to make it efficient to scan transactions without having to download all block data. 'Compute Compact Block Filters (BIP158)' is required.",
default: false,
},
},
},
bloomfilters: {
type: "object",
name: "Bloom Filters (BIP37)",
description: "Setting for serving Bloom Filters",
spec: {
peerbloomfilters: {
type: "boolean",
name: "Serve Bloom Filters to Peers",
description:
"Peers have the option of setting filters on each connection they make after the version handshake has completed. Bloom filters are for clients implementing SPV (Simplified Payment Verification) that want to check that block headers connect together correctly, without needing to verify the full blockchain. The client must trust that the transactions in the chain are in fact valid. It is highly recommended AGAINST using for anything except Bisq integration.",
warning:
"This is ONLY for use with Bisq integration, please use Block Filters for all other applications.",
default: false,
},
},
},
},
},
},
{
startSdk: "../..",
}
);

View File

@@ -1,7 +1,6 @@
import { configBuilder } from "../../index";
const { Config, Value, List, Variants } = configBuilder;
import { Config, Value, List, Variants } from "../../config/builder";
export const enable = configBuilder.Value.boolean({
export const enable = Value.boolean({
name: "Enable",
default: true,
description: "Allow remote RPC requests.",
@@ -439,7 +438,7 @@ export const advanced1 = Value.object({
spec: advancedSpec1,
"value-names": {},
});
export const configSpec = configBuilder.Config.of({
export const configSpec = Config.of({
rpc: rpc,
"zmq-enabled": zmqEnabled,
txindex: txindex,

150
lib/util/fileHelper.ts Normal file
View File

@@ -0,0 +1,150 @@
import * as matches from "ts-matches";
import * as YAML from "yaml";
import * as TOML from "@iarna/toml";
import * as T from "../types";
import { exists } from ".";
const previousPath = /(.+?)\/([^/]*)$/;
/**
* Used in the get config and the set config exported functions.
* The idea is that we are going to be reading/ writing to a file, or multiple files. And then we use this tool
* to keep the same path on the read and write, and have methods for helping with structured data.
* And if we are not using a structured data, we can use the raw method which forces the construction of a BiMap
* ```ts
import {configSpec} from './configSpec.ts'
import {matches, T} from '../deps.ts';
const { object, string, number, boolean, arrayOf, array, anyOf, allOf } = matches
const someValidator = object({
data: string
})
const jsonFile = FileHelper.json({
path: 'data.json',
validator: someValidator,
volume: 'main'
})
const tomlFile = FileHelper.toml({
path: 'data.toml',
validator: someValidator,
volume: 'main'
})
const rawFile = FileHelper.raw({
path: 'data.amazingSettings',
volume: 'main'
fromData(dataIn: Data): string {
return `myDatais ///- ${dataIn.data}`
},
toData(rawData: string): Data {
const [,data] = /myDatais \/\/\/- (.*)/.match(rawData)
return {data}
}
})
export const setConfig : T.ExpectedExports.setConfig= async (effects, config) => {
await jsonFile.write({ data: 'here lies data'}, effects)
}
export const getConfig: T.ExpectedExports.getConfig = async (effects, config) => ({
spec: configSpec,
  config: nullIfEmpty({
    ...(await jsonFile.read(effects)),
  })
```
*/
export class FileHelper<A> {
protected constructor(
readonly path: string,
readonly volume: string,
readonly writeData: (dataIn: A) => string,
readonly readData: (stringValue: string) => A
) {}
async write(data: A, effects: T.Effects) {
let matched;
if ((matched = previousPath.exec(this.path))) {
await effects.createDir({
volumeId: this.volume,
path: matched[1],
});
}
await effects.writeFile({
path: this.path,
volumeId: this.volume,
toWrite: this.writeData(data),
});
}
async read(effects: T.Effects) {
if (
!(await exists(effects, {
path: this.path,
volumeId: this.volume,
}))
) {
return null;
}
return this.readData(
await effects.readFile({
path: this.path,
volumeId: this.volume,
})
);
}
static raw<A>(
path: string,
volume: string,
toFile: (dataIn: A) => string,
fromFile: (rawData: string) => A
) {
return new FileHelper<A>(path, volume, toFile, fromFile);
}
static json<A>(
path: string,
volume: string,
shape: matches.Validator<unknown, A>
) {
return new FileHelper<A>(
path,
volume,
(inData) => {
return JSON.stringify(inData, null, 2);
},
(inString) => {
return shape.unsafeCast(JSON.parse(inString));
}
);
}
static toml<A extends Record<string, unknown>>(
path: string,
volume: string,
shape: matches.Validator<unknown, A>
) {
return new FileHelper<A>(
path,
volume,
(inData) => {
return JSON.stringify(inData, null, 2);
},
(inString) => {
return shape.unsafeCast(TOML.parse(inString));
}
);
}
static yaml<A extends Record<string, unknown>>(
path: string,
volume: string,
shape: matches.Validator<unknown, A>
) {
return new FileHelper<A>(
path,
volume,
(inData) => {
return JSON.stringify(inData, null, 2);
},
(inString) => {
return shape.unsafeCast(YAML.parse(inString));
}
);
}
}
export default FileHelper;

View File

@@ -1,6 +1,8 @@
import * as T from "./types";
import * as T from "../types";
export { guardAll, typeFromProps } from "./utils/propertiesMatcher";
export { guardAll, typeFromProps } from "./propertiesMatcher";
export { default as nullIfEmpty } from "./nullIfEmpty";
export { FileHelper } from "./fileHelper";
export function unwrapResultType<T>(res: T.ResultType<T>): T {
if ("error-code" in res) {

10
lib/util/nullIfEmpty.ts Normal file
View File

@@ -0,0 +1,10 @@
/**
* A useful tool when doing a getConfig.
* Look into the config {@link FileHelper} for an example of the use.
* @param s
* @returns
*/
/**
 * Collapses an empty config record to null.
 * Handy in getConfig implementations; see {@link FileHelper} for an example.
 * @param s candidate record, or null
 * @returns null when `s` is null or has no own keys; otherwise `s` unchanged
 */
export default function nullIfEmpty(s: null | Record<string, unknown>) {
  const isEmpty = s === null || Object.keys(s).length === 0;
  return isEmpty ? null : s;
}

View File

@@ -1,326 +0,0 @@
{
"rpc": {
"type": "object",
"name": "RPC Settings",
"description": "RPC configuration options.",
"spec": {
"enable": {
"type": "boolean",
"name": "Enable",
"description": "Allow remote RPC requests.",
"default": true
},
"username": {
"type": "string",
"nullable": false,
"name": "Username",
"description": "The username for connecting to Bitcoin over RPC.",
"default": "bitcoin",
"masked": true,
"pattern": "^[a-zA-Z0-9_]+$",
"pattern-description": "Must be alphanumeric (can contain underscore)."
},
"password": {
"type": "string",
"nullable": false,
"name": "RPC Password",
"description": "The password for connecting to Bitcoin over RPC.",
"default": {
"charset": "a-z,2-7",
"len": 20
},
"pattern": "^[^\\n\"]*$",
"pattern-description": "Must not contain newline or quote characters.",
"copyable": true,
"masked": true
},
"advanced": {
"type": "object",
"name": "Advanced",
"description": "Advanced RPC Settings",
"spec": {
"auth": {
"name": "Authorization",
"description": "Username and hashed password for JSON-RPC connections. RPC clients connect using the usual http basic authentication.",
"type": "list",
"subtype": "string",
"default": [],
"spec": {
"pattern": "^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$",
"pattern-description": "Each item must be of the form \"<USERNAME>:<SALT>$<HASH>\".",
"masked": false
},
"range": "[0,*)"
},
"serialversion": {
"name": "Serialization Version",
"description": "Return raw transaction or block hex with Segwit or non-SegWit serialization.",
"type": "enum",
"values": ["non-segwit", "segwit"],
"value-names": {},
"default": "segwit"
},
"servertimeout": {
"name": "Rpc Server Timeout",
"description": "Number of seconds after which an uncompleted RPC call will time out.",
"type": "number",
"nullable": false,
"range": "[5,300]",
"integral": true,
"units": "seconds",
"default": 30
},
"threads": {
"name": "Threads",
"description": "Set the number of threads for handling RPC calls. You may wish to increase this if you are making lots of calls via an integration.",
"type": "number",
"nullable": false,
"default": 16,
"range": "[1,64]",
"integral": true
},
"workqueue": {
"name": "Work Queue",
"description": "Set the depth of the work queue to service RPC calls. Determines how long the backlog of RPC requests can get before it just rejects new ones.",
"type": "number",
"nullable": false,
"default": 128,
"range": "[8,256]",
"integral": true,
"units": "requests"
}
}
}
}
},
"zmq-enabled": {
"type": "boolean",
"name": "ZeroMQ Enabled",
"description": "Enable the ZeroMQ interface",
"default": true
},
"txindex": {
"type": "boolean",
"name": "Transaction Index",
"description": "Enable the Transaction Index (txindex)",
"default": true
},
"wallet": {
"type": "object",
"name": "Wallet",
"description": "Wallet Settings",
"spec": {
"enable": {
"name": "Enable Wallet",
"description": "Load the wallet and enable wallet RPC calls.",
"type": "boolean",
"default": true
},
"avoidpartialspends": {
"name": "Avoid Partial Spends",
"description": "Group outputs by address, selecting all or none, instead of selecting on a per-output basis. This improves privacy at the expense of higher transaction fees.",
"type": "boolean",
"default": true
},
"discardfee": {
"name": "Discard Change Tolerance",
"description": "The fee rate (in BTC/kB) that indicates your tolerance for discarding change by adding it to the fee.",
"type": "number",
"nullable": false,
"default": 0.0001,
"range": "[0,.01]",
"integral": false,
"units": "BTC/kB"
}
}
},
"advanced": {
"type": "object",
"name": "Advanced",
"description": "Advanced Settings",
"spec": {
"mempool": {
"type": "object",
"name": "Mempool",
"description": "Mempool Settings",
"spec": {
"mempoolfullrbf": {
"name": "Enable Full RBF",
"description": "Policy for your node to use for relaying and mining unconfirmed transactions. For details, see https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-24.0.md#notice-of-new-option-for-transaction-replacement-policies",
"type": "boolean",
"default": false
},
"persistmempool": {
"type": "boolean",
"name": "Persist Mempool",
"description": "Save the mempool on shutdown and load on restart.",
"default": true
},
"maxmempool": {
"type": "number",
"nullable": false,
"name": "Max Mempool Size",
"description": "Keep the transaction memory pool below <n> megabytes.",
"range": "[1,*)",
"integral": true,
"units": "MiB",
"default": 300
},
"mempoolexpiry": {
"type": "number",
"nullable": false,
"name": "Mempool Expiration",
"description": "Do not keep transactions in the mempool longer than <n> hours.",
"range": "[1,*)",
"integral": true,
"units": "Hr",
"default": 336
}
}
},
"peers": {
"type": "object",
"name": "Peers",
"description": "Peer Connection Settings",
"spec": {
"listen": {
"type": "boolean",
"name": "Make Public",
"description": "Allow other nodes to find your server on the network.",
"default": true
},
"onlyconnect": {
"type": "boolean",
"name": "Disable Peer Discovery",
"description": "Only connect to specified peers.",
"default": false
},
"onlyonion": {
"type": "boolean",
"name": "Disable Clearnet",
"description": "Only connect to peers over Tor.",
"default": false
},
"addnode": {
"name": "Add Nodes",
"description": "Add addresses of nodes to connect to.",
"type": "list",
"subtype": "object",
"range": "[0,*)",
"default": [],
"spec": {
"unique-by": null,
"spec": {
"hostname": {
"type": "string",
"nullable": false,
"name": "Hostname",
"description": "Domain or IP address of bitcoin peer",
"pattern": "(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))",
"pattern-description": "Must be either a domain name, or an IPv4 or IPv6 address. Do not include protocol scheme (eg 'http://') or port.",
"masked": false
},
"port": {
"type": "number",
"nullable": true,
"name": "Port",
"description": "Port that peer is listening on for inbound p2p connections",
"range": "[0,65535]",
"integral": true
}
}
}
}
}
},
"dbcache": {
"type": "number",
"nullable": true,
"name": "Database Cache",
"description": "How much RAM to allocate for caching the TXO set. Higher values improve syncing performance, but increase your chance of using up all your system's memory or corrupting your database in the event of an ungraceful shutdown. Set this high but comfortably below your system's total RAM during IBD, then turn down to 450 (or leave blank) once the sync completes.",
"warning": "WARNING: Increasing this value results in a higher chance of ungraceful shutdowns, which can leave your node unusable if it happens during the initial block download. Use this setting with caution. Be sure to set this back to the default (450 or leave blank) once your node is synced. DO NOT press the STOP button if your dbcache is large. Instead, set this number back to the default, hit save, and wait for bitcoind to restart on its own.",
"range": "(0,*)",
"integral": true,
"units": "MiB"
},
"pruning": {
"type": "union",
"name": "Pruning Settings",
"description": "Blockchain Pruning Options\nReduce the blockchain size on disk\n",
"warning": "If you set pruning to Manual and your disk is smaller than the total size of the blockchain, you MUST have something running that prunes these blocks or you may overfill your disk!\nDisabling pruning will convert your node into a full archival node. This requires a resync of the entire blockchain, a process that may take several days. Make sure you have enough free disk space or you may fill up your disk.\n",
"tag": {
"id": "mode",
"name": "Pruning Mode",
"description": "- Disabled: Disable pruning\n- Automatic: Limit blockchain size on disk to a certain number of megabytes\n- Manual: Prune blockchain with the \"pruneblockchain\" RPC\n",
"variant-names": {
"disabled": "Disabled",
"automatic": "Automatic",
"manual": "Manual"
}
},
"variants": {
"disabled": {},
"automatic": {
"size": {
"type": "number",
"nullable": false,
"name": "Max Chain Size",
"description": "Limit of blockchain size on disk.",
"warning": "Increasing this value will require re-syncing your node.",
"default": 550,
"range": "[550,1000000)",
"integral": true,
"units": "MiB"
}
},
"manual": {
"size": {
"type": "number",
"nullable": false,
"name": "Failsafe Chain Size",
"description": "Prune blockchain if size expands beyond this.",
"default": 65536,
"range": "[550,1000000)",
"integral": true,
"units": "MiB"
}
}
},
"default": "disabled"
},
"blockfilters": {
"type": "object",
"name": "Block Filters",
"description": "Settings for storing and serving compact block filters",
"spec": {
"blockfilterindex": {
"type": "boolean",
"name": "Compute Compact Block Filters (BIP158)",
"description": "Generate Compact Block Filters during initial sync (IBD) to enable 'getblockfilter' RPC. This is useful if dependent services need block filters to efficiently scan for addresses/transactions etc.",
"default": true
},
"peerblockfilters": {
"type": "boolean",
"name": "Serve Compact Block Filters to Peers (BIP157)",
"description": "Serve Compact Block Filters as a peer service to other nodes on the network. This is useful if you wish to connect an SPV client to your node to make it efficient to scan transactions without having to download all block data. 'Compute Compact Block Filters (BIP158)' is required.",
"default": false
}
}
},
"bloomfilters": {
"type": "object",
"name": "Bloom Filters (BIP37)",
"description": "Setting for serving Bloom Filters",
"spec": {
"peerbloomfilters": {
"type": "boolean",
"name": "Serve Bloom Filters to Peers",
"description": "Peers have the option of setting filters on each connection they make after the version handshake has completed. Bloom filters are for clients implementing SPV (Simplified Payment Verification) that want to check that block headers connect together correctly, without needing to verify the full blockchain. The client must trust that the transactions in the chain are in fact valid. It is highly recommended AGAINST using for anything except Bisq integration.",
"warning": "This is ONLY for use with Bisq integration, please use Block Filters for all other applications.",
"default": false
}
}
}
}
}
}

28
package-lock.json generated
View File

@@ -1,20 +1,22 @@
{
"name": "start-sdk",
"version": "0.4.0-lib0.alpha1",
"version": "0.4.0-lib0.alpha2",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "start-sdk",
"version": "0.4.0-lib0.alpha1",
"version": "0.4.0-lib0.alpha2",
"license": "MIT",
"dependencies": {
"@iarna/toml": "^2.2.5",
"lodash": "^4.17.21",
"ts-matches": "^5.4.1",
"yaml": "^2.2.1"
},
"devDependencies": {
"@types/jest": "^29.4.0",
"@types/lodash": "^4.14.191",
"jest": "^29.4.3",
"ts-jest": "^29.0.5",
"ts-node": "^10.9.1",
@@ -1582,6 +1584,12 @@
"integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
"dev": true
},
"node_modules/@types/lodash": {
"version": "4.14.191",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.191.tgz",
"integrity": "sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ==",
"dev": true
},
"node_modules/@types/node": {
"version": "18.14.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.2.tgz",
@@ -3761,6 +3769,11 @@
"node": ">=8"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/lodash.memoize": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
@@ -6391,6 +6404,12 @@
"integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
"dev": true
},
"@types/lodash": {
"version": "4.14.191",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.191.tgz",
"integrity": "sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ==",
"dev": true
},
"@types/node": {
"version": "18.14.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.2.tgz",
@@ -8057,6 +8076,11 @@
"p-locate": "^4.1.0"
}
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"lodash.memoize": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",

View File

@@ -1,21 +1,13 @@
{
"name": "start-sdk",
"version": "0.4.0-lib0.alpha1",
"version": "0.4.0-lib0.alpha3",
"description": "For making the patterns that are wanted in making services for the startOS.",
"main": "./dist/index.cjs",
"types": "./dist/index.d.ts",
"module": "./dist/index.mjs",
"files": [
"dist/**/*"
],
"exports": {
".": {
"import": "./dist/index.mjs",
"require": "./dist/index.cjs"
}
},
"main": "./index.cjs",
"types": "./index.d.ts",
"module": "./index.mjs",
"scripts": {
"test": "jest"
"test": "jest -c ./jest.config.js",
"buildOutput": "ts-node --esm ./lib/util/artifacts/makeOutput.ts"
},
"repository": {
"type": "git",
@@ -29,11 +21,13 @@
"homepage": "https://github.com/Start9Labs/start-sdk#readme",
"dependencies": {
"@iarna/toml": "^2.2.5",
"lodash": "^4.17.21",
"ts-matches": "^5.4.1",
"yaml": "^2.2.1"
},
"devDependencies": {
"@types/jest": "^29.4.0",
"@types/lodash": "^4.14.191",
"jest": "^29.4.3",
"ts-jest": "^29.0.5",
"ts-node": "^10.9.1",

View File

@@ -13,5 +13,10 @@
"strict": true,
"skipLibCheck": true
},
"ts-node": {
"compilerOptions": {
"module": "commonjs"
}
}
}