mirror of
https://github.com/Start9Labs/patch-db.git
synced 2026-03-26 02:11:54 +00:00
move client from separate repo
This commit is contained in:
2
client/.gitignore
vendored
Normal file
2
client/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
dist
|
||||
node_modules
|
||||
9
client/index.ts
Normal file
9
client/index.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
export * from './lib/store'
|
||||
|
||||
export * from './lib/source/poll-source'
|
||||
export * from './lib/source/ws-source'
|
||||
export * from './lib/source/source'
|
||||
|
||||
export * from './lib/action-serializer'
|
||||
export * from './lib/patch-db'
|
||||
export * from './lib/sequence-store'
|
||||
44
client/lib/action-serializer.ts
Normal file
44
client/lib/action-serializer.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { Subject, BehaviorSubject, of, Observable, Observer, throwError } from 'rxjs'
|
||||
import { concatMap, map, catchError, filter, take } from 'rxjs/operators'
|
||||
|
||||
// A unit of work for the ActionSerializer: the synchronous `action` to run,
// and a `notify` subject the serializer pushes the outcome into
// (at runtime it receives `{ result }` or `{ error }` — see ActionSerializer).
export type Action<T> = {
  action: () => T,
  notify: BehaviorSubject<undefined | T>
}
|
||||
|
||||
export class ActionSerializer {
|
||||
private readonly sequentialActions = new Subject<Action<any>>()
|
||||
|
||||
constructor () {
|
||||
this.sequentialActions.pipe(
|
||||
concatMap(({ action, notify }) => fromSync$(action).pipe(
|
||||
catchError(e => of(notify.next({ error: e }))),
|
||||
map(result => notify.next({ result })),
|
||||
)),
|
||||
catchError(e => of(console.error(`Action Serializer Exception`, e))),
|
||||
).subscribe()
|
||||
}
|
||||
|
||||
run$<T> (action: () => T): Observable<T> {
|
||||
const notify = new BehaviorSubject(undefined) as BehaviorSubject<T | undefined>
|
||||
this.sequentialActions.next({ action, notify })
|
||||
return (notify as BehaviorSubject<T>).pipe(
|
||||
filter(res => res !== undefined),
|
||||
take(1),
|
||||
concatMap((res: any) => res.error ? throwError(res.error) : of(res.result)),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
function fromSync$<S, T> (action: (s: S) => T, s: S): Observable<T>
|
||||
function fromSync$<T> (action: () => T): Observable<T>
|
||||
function fromSync$<S, T> (action: (s: S) => T, s?: S): Observable<T> {
|
||||
return new Observable((subscriber: Observer<T>) => {
|
||||
try {
|
||||
subscriber.next(action(s as S))
|
||||
subscriber.complete()
|
||||
} catch (e) {
|
||||
subscriber.error(e)
|
||||
}
|
||||
})
|
||||
}
|
||||
136
client/lib/patch-db.ts
Normal file
136
client/lib/patch-db.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
import { EMPTY, from, merge, Observable, of, Subject, timer } from 'rxjs'
|
||||
import { catchError, concatMap, delay, finalize, map, skip, take, takeUntil, tap, throttleTime } from 'rxjs/operators'
|
||||
import { Source } from './source/source'
|
||||
import { Dump, SequenceStore, Result, Revision } from './sequence-store'
|
||||
import { Store } from './store'
|
||||
export { Operation } from 'fast-json-patch'
|
||||
|
||||
export class PatchDB<T extends object> {
  // Fired to abort a pending stash timeout once the missing revision arrives.
  private readonly cancelStashTimeout = new Subject()

  // Private: construction requires async bootstrapping — use PatchDB.init().
  private constructor (
    private readonly sources: Source<T>[],
    private readonly http: Http<T>,
    private readonly sequenceStore: SequenceStore<T>,
    // ms to wait for a missing revision before falling back to a full dump
    private readonly timeoutForMissingRevision: number = 5000,
  ) { }

  // The underlying observable document store.
  get store (): Store<T> { return this.sequenceStore.store }

  /**
   * Build a PatchDB. If a bootstrapper is supplied, it seeds the initial
   * { sequence, data } from cache (best-effort: failures fall back to an empty
   * store) and is kept up to date, throttled, whenever the sequence store emits.
   */
  static async init<T extends object> (conf: PatchDbConfig<T>): Promise<PatchDB<T>> {
    console.log('PATCHDB - init(): ', conf)
    const { sources, http, bootstrapper, timeoutForMissingRevision } = conf

    let sequence: number = 0
    let data: T = { } as T
    if (bootstrapper) {
      try {
        const cache = await bootstrapper.init()
        console.log('FROM CACHE', cache)
        sequence = cache.sequence
        data = cache.data
      } catch (e) {
        // Bootstrapping is best-effort: start from an empty store on failure.
        console.error('bootstrapper failed: ', e)
      }
    }

    const store = new Store(data)
    const sequenceStore = new SequenceStore(store, sequence)

    // update cache when sequenceStore emits, throttled
    if (bootstrapper) {
      // skip(1) avoids immediately writing back the state we just bootstrapped from.
      sequenceStore.watch$().pipe(throttleTime(500), delay(500), skip(1)).subscribe(({ data, sequence }) => {
        console.log('PATCHDB - update cache(): ', sequence, data)
        bootstrapper.update({ sequence, data }).catch(e => {
          console.error('Exception in updateCache: ', e)
        })
      })
    }

    return new PatchDB(sources, http, sequenceStore, timeoutForMissingRevision)
  }

  /**
   * Merge all sources and feed their updates through the sequence store,
   * strictly sequentially. Subscribe to start syncing; on teardown the
   * sequence resets to 0 so a later re-sync starts from scratch.
   */
  sync$ (): Observable<void> {
    console.log('PATCHDB - sync$()')
    const sequence$ = this.sequenceStore.watch$().pipe(map(cache => cache.sequence))
    // nested concatMaps, as it is written, ensure sync is not run for update2 until handleSyncResult is complete for update1.
    // flat concatMaps would allow many syncs to run while handleSyncResult was hanging. We can consider such an idea if performance requires it.
    return merge(...this.sources.map(s => s.watch$(sequence$))).pipe(
      tap(update => console.log('PATCHDB - sources updated:', update)),
      concatMap(update =>
        this.sequenceStore.update$(update).pipe(
          concatMap(res => this.handleSyncResult$(res)),
        ),
      ),
      finalize(() => {
        console.log('FINALIZING')
        this.sequenceStore.sequence = 0
      }),
    )
  }

  // Dispatch on the sequence store's verdict for the last applied update.
  private handleSyncResult$ (res: Result): Observable<void> {
    console.log('PATCHDB - handleSyncResult$(): ', res)
    switch (res) {
      case Result.DUMPED: return of(this.cancelStashTimeout.next('')) // cancel stash timeout
      case Result.REVISED: return of(this.cancelStashTimeout.next('')) // cancel stash timeout
      case Result.STASHED: return this.handleStashTimeout$() // call error after timeout
      case Result.ERROR: return this.handlePatchError$() // call error immediately
      default: return EMPTY
    }
  }

  // Wait timeoutForMissingRevision ms for the missing revision; if none
  // arrives (cancelStashTimeout never fires), fall back to a full dump.
  private handleStashTimeout$ (): Observable<void> {
    console.log('PATCHDB - handleStashTimeout$()')
    return timer(this.timeoutForMissingRevision).pipe(
      tap(time => console.log('PATCHDB - timeout for missing patch:', time)),
      takeUntil(this.cancelStashTimeout),
      take(1),
      concatMap(() => this.handlePatchError$()),
    )
  }

  // Here flattened concatMaps are functionally equivalent to nested because the source observable emits at most once.
  // Recovery path: fetch a full dump over HTTP and replace the store with it.
  private handlePatchError$ (): Observable<void> {
    return from(this.http.getDump()).pipe(
      concatMap(dump => this.sequenceStore.update$(dump)),
      // note the above is a "dump" update, which will always return DUMPED (it can't error)
      // handleSyncResult will therefore never re-call handlePatchError()
      concatMap(res => this.handleSyncResult$(res)),
      catchError(e => {
        console.error(e)
        return EMPTY
      }),
    )
  }
}
|
||||
|
||||
// Configuration for PatchDB.init().
export type PatchDbConfig<T> = {
  http: Http<T>
  sources: Source<T>[]
  bootstrapper?: Bootstrapper<T>
  // ms to wait for a missing revision before falling back to a full dump (default 5000)
  timeoutForMissingRevision?: number
}

// JSON-patch operation kinds used when constructing patches.
export enum PatchOp {
  ADD = 'add',
  REMOVE = 'remove',
  REPLACE = 'replace',
}

// Transport for fetching updates from the server.
export interface Http<T> {
  // Returns the revisions after `since`, or a full Dump if the server prefers.
  getRevisions (since: number): Promise<Revision[] | Dump<T>>
  getDump (): Promise<Dump<T>>
}

// Persists { sequence, data } between sessions so PatchDB can resume
// from a cached state instead of an empty store.
export interface Bootstrapper<T> {
  init (): Promise<DBCache<T>>
  update (cache: DBCache<T>): Promise<void>
  clear (): Promise<void>
}

// A snapshot of the store plus the sequence number it corresponds to.
export interface DBCache<T>{
  sequence: number,
  data: T
}
|
||||
211
client/lib/sequence-store.ts
Normal file
211
client/lib/sequence-store.ts
Normal file
@@ -0,0 +1,211 @@
|
||||
import { BehaviorSubject, Observable } from 'rxjs'
|
||||
import { filter } from 'rxjs/operators'
|
||||
import { Store } from './store'
|
||||
import { DBCache } from './patch-db'
|
||||
import { patchDocument } from './store'
|
||||
import { ActionSerializer } from './action-serializer'
|
||||
import { Operation } from 'fast-json-patch'
|
||||
import BTree from 'sorted-btree'
|
||||
|
||||
export class SequenceStore<T extends object> {
  // Replays the last committed { data, sequence }; seeded undefined until the first commit.
  private readonly lastState$: BehaviorSubject<DBCache<T>> = new BehaviorSubject(undefined as any)
  // Serializes all updates so they apply strictly one at a time.
  private readonly actionSerializer = new ActionSerializer()
  // Document state *before* temporary patches are layered on top.
  private preTemps: T
  // Out-of-order revisions, stashed by id until they become contiguous.
  private stashed = new BTree<number, Revision>()
  // Client-side temporary patches, re-applied on top of every real update.
  private temps: UpdateTemp[] = []
  private sequence$ = new BehaviorSubject(0)

  constructor (
    readonly store: Store<T>,
    initialSequence: number,
  ) {
    const data = store.peek
    this.preTemps = data
    // Commit the initial state so watch$ has something to replay.
    this.commit({ data, sequence: initialSequence }, [])
  }

  get sequence (): number { return this.sequence$.getValue() }
  set sequence (seq: number) { this.sequence$.next(seq) }

  // subscribe to watch$ to get sequence + T feed, e.g. for caching and bootstrapping from a cache
  watch$ (): Observable<DBCache<T>> {
    return this.lastState$.pipe(filter(a => !!a))
  }

  // Apply an update (real or temporary), serialized with all other updates.
  update$ (update: Update<T>): Observable<Result> {
    return this.actionSerializer.run$(() => {
      if (isTemp(update)) {
        return this.updateTemp(update)
      } else {
        return this.updateReal(update)
      }
    })
  }

  // Currently stashed (not yet applied) revisions, in id order.
  viewRevisions (): Revision[] {
    // return this.revisions.filter(a => !!a)
    return this.stashed.valuesArray()
  }

  // Apply a server revision or dump: expire temps it supersedes, apply it
  // (plus any now-contiguous stashed revisions), re-layer the remaining temps,
  // then commit the whole result atomically.
  private updateReal (update: UpdateReal<T>): Result {
    if (update.expireId) { this.temps = this.temps.filter(temp => temp.expiredBy !== update.expireId) }
    // Already at or past this id — ignore.
    if (update.id <= this.sequence) return Result.NOOP

    const { result, dbCache, revisionsToDelete } = isDump(update) ?
      this.dump(update) :
      this.revise(update)

    this.preTemps = dbCache.data
    const afterTemps = this.stageSeqTemps(dbCache)
    this.commit(afterTemps, revisionsToDelete)
    return result
  }

  // Apply a temporary (optimistic, client-side) patch on top of the current
  // view; the sequence number does not advance.
  private updateTemp (update: UpdateTemp): Result {
    this.temps.push(update)
    const data = patchDocument(update.patch, this.store.peek)
    const res = {
      data,
      sequence: this.sequence,
    }
    this.commit(res, [])
    return Result.TEMP
  }

  // Single point of mutation: prune the stash, bump the sequence, set the
  // store, and broadcast the new state.
  private commit (res: DBCache<T>, revisionsToDelete: number[]): void {
    const { data, sequence } = res
    this.stashed.deleteKeys(revisionsToDelete)
    this.sequence$.next(sequence)
    this.store.set(data)
    this.lastState$.next({ data, sequence })
  }

  // Replace the whole document. Stashed revisions older than the dump's id are
  // dropped; newer contiguous ones are applied on top of the dumped value.
  private dump (dump: Dump<T>): { result: Result, dbCache: DBCache<T>, revisionsToDelete: number[] } {
    try {
      const oldRevisions = this.stashed.filter((key, _) => key < dump.id).keysArray()
      const { dbCache, revisionsToDelete } = this.processRevisions(dump.value, dump.id)
      return {
        result: Result.DUMPED,
        dbCache,
        revisionsToDelete: oldRevisions.concat(revisionsToDelete),
      }
    } catch (e) {
      console.error(`Dump error for ${JSON.stringify(dump)}: `, e)
      // On failure, keep the pre-temp state and stash untouched.
      return {
        result: Result.ERROR,
        dbCache: {
          data: this.preTemps,
          sequence: this.sequence,
        },
        revisionsToDelete: [],
      }
    }
  }

  // Stash the revision by id, then apply whatever contiguous run now exists
  // starting from the current sequence.
  private revise (revision: Revision): { result: Result, dbCache: DBCache<T>, revisionsToDelete: number[] } {
    this.stashed.set(revision.id, revision)
    try {
      return this.processRevisions(this.preTemps, this.sequence)
    } catch (e) {
      console.error(`Revise error for ${JSON.stringify(revision)}: `, e)
      return {
        result: Result.ERROR,
        dbCache: {
          data: this.preTemps,
          sequence: this.sequence,
        },
        revisionsToDelete: [],
      }
    }
  }

  // Re-apply all temporary patches on top of a freshly computed state.
  // A temp that no longer applies cleanly is skipped (logged), not fatal.
  private stageSeqTemps<S extends DBCache<T>> (resultSoFar: S): S {
    return this.temps.reduce(({ data, ...rest }, nextTemp ) => {
      try {
        const nextContents = patchDocument(nextTemp.patch, data)
        return { data: nextContents, ...rest } as S
      } catch (e) {
        console.error(`Skipping temporary patch ${JSON.stringify(nextTemp)} due to exception: `, e)
        return { data, ...rest } as S
      }
    }, resultSoFar)
  }

  // Apply every stashed revision contiguous with `sequence`, advancing the
  // sequence one step per revision. Returns STASHED when none apply yet.
  private processRevisions (data: T, sequence: number): { result: Result, dbCache: DBCache<T>, revisionsToDelete: number[] } {
    const applicableRevisions = this.applicableRevisions(sequence)

    console.log('APPLICABLE: ', applicableRevisions)

    if (!applicableRevisions.length) {
      return {
        result: Result.STASHED,
        dbCache: {
          data,
          sequence,
        },
        revisionsToDelete: [],
      }
    }

    const revisionsToDelete: number[] = []
    const toReturn = applicableRevisions.reduce(({ data, sequence }, revision) => {
      const nextContents = patchDocument(revision.patch, data)
      const nextSequence = sequence + 1
      revisionsToDelete.push(revision.id) // @TODO original was `revisionsToDelete.concat([seqPatch.id])`, why?
      return { data: nextContents, sequence: nextSequence }
    }, { data, sequence })

    return {
      result: Result.REVISED,
      dbCache: toReturn,
      revisionsToDelete,
    }
  }

  // Collect the stashed revisions whose ids run contiguously from sequence + 1;
  // stops at the first gap.
  private applicableRevisions (sequence: number): Revision[] {
    const toReturn = [] as Revision[]

    let i = sequence
    while (true) {
      i++
      const next = this.stashed.get(i)
      if (next) {
        toReturn.push(next)
      } else {
        break
      }
    }

    return toReturn
  }
}
|
||||
|
||||
// Outcome of feeding an Update into the SequenceStore.
export enum Result {
  DUMPED = 'DUMPED', // store was dumped/replaced
  REVISED = 'REVISED', // store was revised
  TEMP = 'TEMP', // store was revised temporarily
  STASHED = 'STASHED', // attempted to revise store but sequence too high. revision stashed for later
  ERROR = 'ERROR', // attempted to revise/dump store, but failed
  NOOP = 'NOOP', // sequence too low, update ignored
}

// revise a collection of nodes.
export type Revision = { id: number, patch: Operation[], expireId: string | null }
// dump/replace the entire store with T
export type Dump<T> = { id: number, value: T, expireId: string | null }

export type Update<T> = UpdateReal<T> | UpdateTemp
// A server-authoritative update carrying a sequence id.
export type UpdateReal<T> = Revision | Dump<T>
// A client-side optimistic patch; `expiredBy` links it to the real update that retires it.
export type UpdateTemp = Omit<Revision, 'id' | 'expireId'> & { expiredBy : string }
|
||||
|
||||
function isTemp<T> (s: Update<T>): s is UpdateTemp {
|
||||
return !!(s as any).expiredBy
|
||||
}
|
||||
|
||||
function isRevision<T> (s: Update<T>): s is Revision {
|
||||
return !isTemp(s) && !!(s as any).patch
|
||||
}
|
||||
|
||||
function isDump<T> (s: UpdateReal<T>): s is Dump<T> {
|
||||
return !isTemp(s) && !!(s as any).value
|
||||
}
|
||||
56
client/lib/source/poll-source.ts
Normal file
56
client/lib/source/poll-source.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { BehaviorSubject, concat, from, Observable, of } from 'rxjs'
|
||||
import { catchError, concatMap, delay, skip, switchMap, take, tap } from 'rxjs/operators'
|
||||
import { Http } from '../patch-db'
|
||||
import { UpdateReal } from '../sequence-store'
|
||||
import { Source } from './source'
|
||||
|
||||
// Configuration for PollSource.
export type PollConfig = {
  // ms to wait after a request completes before starting the next one
  cooldown: number
}

/**
 * Source that repeatedly polls Http.getRevisions, waiting `cooldown` ms
 * between cycles. Each cycle samples the latest applied sequence just before
 * making its request.
 */
export class PollSource<T> implements Source<T> {

  constructor (
    private readonly pollConfig: PollConfig,
    private readonly http: Http<T>,
  ) { }

  watch$ (sequence$: Observable<number>): Observable<UpdateReal<T>> {
    console.log('POLL_SOURCE - watch$()')

    // Each emission triggers one poll cycle; seeded so polling starts immediately.
    const polling$ = new BehaviorSubject('')

    const updates$ = of('').pipe(
      // sample the current sequence exactly once per cycle
      concatMap(_ => sequence$),
      take(1),
      tap(_ => console.log('making request')),
      concatMap(seq => this.http.getRevisions(seq)),
      // request failures are swallowed: emit nothing this cycle, keep polling
      catchError(e => {
        console.error(e)
        return of([])
      }),
      tap(_ => console.log('request complete')),
    )

    const delay$ = of([]).pipe(
      tap(_ => console.log('starting cooldown')),
      delay(this.pollConfig.cooldown),
      tap(_ => console.log('cooldown finished')),
      // kick off the next cycle via polling$ ...
      tap(_ => polling$.next('')),
      // ... but emit nothing downstream (the [] is only a timing vehicle)
      skip(1),
    )

    // One cycle = request, then cooldown.
    const poll$ = concat(updates$, delay$)

    return polling$.pipe(
      switchMap(_ => poll$),
      concatMap(res => {
        if (Array.isArray(res)) {
          return from(res) // takes Revision[] and converts it into Observable<Revision>
        } else {
          return of(res) // takes Dump<T> and converts it into Observable<Dump<T>>
        }
      }),
    )
  }
}
|
||||
6
client/lib/source/source.ts
Normal file
6
client/lib/source/source.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import { Observable } from 'rxjs'
|
||||
import { Update } from '../sequence-store'
|
||||
|
||||
/**
 * A feed of store updates (revisions, dumps, or temps). Implementations may
 * use the optional sequence$ to learn what the store has already applied
 * (e.g. a poller asking "what revisions come after X?").
 */
export interface Source<T> {
  watch$ (sequence$?: Observable<number>): Observable<Update<T>>
}
|
||||
28
client/lib/source/ws-source.ts
Normal file
28
client/lib/source/ws-source.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { Observable } from 'rxjs'
|
||||
import { webSocket, WebSocketSubject, WebSocketSubjectConfig } from 'rxjs/webSocket'
|
||||
import { UpdateReal } from '../sequence-store'
|
||||
import { Source } from './source'
|
||||
|
||||
export class WebsocketSource<T> implements Source<T> {
|
||||
private websocket$: WebSocketSubject<UpdateReal<T>>
|
||||
|
||||
constructor (
|
||||
readonly url: string,
|
||||
) {
|
||||
const fullConfig: WebSocketSubjectConfig<UpdateReal<T>> = {
|
||||
url,
|
||||
openObserver: {
|
||||
next: () => console.log('WebSocket connection open'),
|
||||
},
|
||||
closeObserver: {
|
||||
next: () => console.log('WebSocket connection closed'),
|
||||
},
|
||||
closingObserver: {
|
||||
next: () => console.log('Websocket subscription cancelled, websocket closing'),
|
||||
},
|
||||
}
|
||||
this.websocket$ = webSocket(fullConfig)
|
||||
}
|
||||
|
||||
watch$ (): Observable<UpdateReal<T>> { return this.websocket$.asObservable() }
|
||||
}
|
||||
48
client/lib/store.ts
Normal file
48
client/lib/store.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { from, Observable } from 'rxjs'
|
||||
import { applyPatch, Operation } from 'fast-json-patch'
|
||||
import { observable, runInAction } from 'mobx'
|
||||
import { toStream } from 'mobx-utils'
|
||||
|
||||
export class Store<T extends object> {
|
||||
private o: { data: T }
|
||||
|
||||
constructor (data: T) {
|
||||
this.o = observable({ data })
|
||||
}
|
||||
|
||||
get peek (): T { return this.o.data }
|
||||
|
||||
watch$ (): Observable<T>
|
||||
watch$<P1 extends keyof T> (p1: P1): Observable<T[P1]>
|
||||
watch$<P1 extends keyof T, P2 extends keyof T[P1]> (p1: P1, p2: P2): Observable<T[P1][P2]>
|
||||
watch$<P1 extends keyof T, P2 extends keyof T[P1], P3 extends keyof T[P1][P2]> (p1: P1, p2: P2, p3: P3): Observable<T[P1][P2][P3]>
|
||||
watch$<P1 extends keyof T, P2 extends keyof T[P1], P3 extends keyof T[P1][P2], P4 extends keyof T[P1][P2][P3]> (p1: P1, p2: P2, p3: P3, p4: P4): Observable<T[P1][P2][P3][P4]>
|
||||
watch$<P1 extends keyof T, P2 extends keyof T[P1], P3 extends keyof T[P1][P2], P4 extends keyof T[P1][P2][P3], P5 extends keyof T[P1][P2][P3][P4]> (p1: P1, p2: P2, p3: P3, p4: P4, p5: P5): Observable<T[P1][P2][P3][P4][P5]>
|
||||
watch$<P1 extends keyof T, P2 extends keyof T[P1], P3 extends keyof T[P1][P2], P4 extends keyof T[P1][P2][P3], P5 extends keyof T[P1][P2][P3][P4], P6 extends keyof T[P1][P2][P3][P4][P5]> (p1: P1, p2: P2, p3: P3, p4: P4, p5: P5, p6: P6): Observable<T[P1][P2][P3][P4][P5][P6]>
|
||||
watch$ (...args: (string | number)[]): Observable<any> {
|
||||
return from(toStream(() => this.peekAccess(...args), true))
|
||||
}
|
||||
|
||||
set (data: T): void {
|
||||
runInAction(() => this.o.data = data)
|
||||
}
|
||||
|
||||
applyPatchDocument (patch: Operation[]): { oldDocument: T, newDocument: T } {
|
||||
const oldDocument = this.o.data
|
||||
const newDocument = patchDocument(patch, oldDocument)
|
||||
this.set(newDocument)
|
||||
return { oldDocument, newDocument }
|
||||
}
|
||||
|
||||
private peekAccess (...args: (string | number)[]): any {
|
||||
try {
|
||||
return args.reduce((acc, next) => (acc as any)[`${next}`], this.o.data)
|
||||
} catch (e) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function patchDocument<T> (patch: Operation[], doc: T): T {
|
||||
return applyPatch(doc, patch, true, false).newDocument
|
||||
}
|
||||
2960
client/package-lock.json
generated
Normal file
2960
client/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
35
client/package.json
Normal file
35
client/package.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"name": "patch-db",
|
||||
"version": "1.0.0",
|
||||
"description": "observable db based on json-patch",
|
||||
"author": "Start9 Labs, Inc",
|
||||
"license": "MIT",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "mocha -r ts-node/register tests/**/*.test.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"fast-json-patch": "^3.0.0-1",
|
||||
"jsonpointer": "^4.1.0",
|
||||
"mobx": "^6.1.4",
|
||||
"mobx-utils": "^6.0.3",
|
||||
"rxjs": "^6.6.3",
|
||||
"sorted-btree": "^1.5.0",
|
||||
"uuid": "^8.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/chai": "^4.2.14",
|
||||
"@types/chai-string": "^1.4.2",
|
||||
"@types/mocha": "^8.2.0",
|
||||
"@types/node": "^15.0.0",
|
||||
"@types/uuid": "^8.3.0",
|
||||
"chai": "^4.2.0",
|
||||
"chai-string": "^1.5.0",
|
||||
"mocha": "^8.2.1",
|
||||
"tslint": "^6.1.0",
|
||||
"ts-node": "^9.1.1",
|
||||
"typescript": "4.1.5"
|
||||
}
|
||||
}
|
||||
26
client/tests/mocks/bootstrapper.mock.ts
Normal file
26
client/tests/mocks/bootstrapper.mock.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { Bootstrapper, DBCache } from '../../lib/patch-db'
|
||||
|
||||
export class MockBootstrapper<T> implements Bootstrapper<T> {
|
||||
|
||||
constructor (
|
||||
private sequence: number = 0,
|
||||
private data: T = { } as T,
|
||||
) { }
|
||||
|
||||
async init (): Promise<DBCache<T>> {
|
||||
return {
|
||||
sequence: this.sequence,
|
||||
data: this.data as T,
|
||||
}
|
||||
}
|
||||
|
||||
async update (cache: DBCache<T>): Promise<void> {
|
||||
this.sequence = cache.sequence
|
||||
this.data = cache.data
|
||||
}
|
||||
|
||||
async clear (): Promise<void> {
|
||||
this.sequence = 0
|
||||
this.data = { } as T
|
||||
}
|
||||
}
|
||||
17
client/tests/mocks/http.mock.ts
Normal file
17
client/tests/mocks/http.mock.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { Http } from '../../lib/patch-db'
|
||||
import { Revision, Dump } from '../../lib/sequence-store'
|
||||
|
||||
export class MockHttp<T> implements Http<T> {
|
||||
constructor (private readonly mockData: {
|
||||
getSequences: Revision[],
|
||||
getDump: Dump<T>
|
||||
}) { }
|
||||
|
||||
getRevisions (): Promise<Revision[]> {
|
||||
return Promise.resolve(this.mockData.getSequences)
|
||||
}
|
||||
|
||||
getDump (): Promise<Dump<T>> {
|
||||
return Promise.resolve(this.mockData.getDump)
|
||||
}
|
||||
}
|
||||
1
client/tests/mocks/mock-data.json
Normal file
1
client/tests/mocks/mock-data.json
Normal file
File diff suppressed because one or more lines are too long
18
client/tests/mocks/source.mock.ts
Normal file
18
client/tests/mocks/source.mock.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { Observable } from 'rxjs'
|
||||
import { UpdateReal } from '../../lib/sequence-store'
|
||||
import { Source } from '../../lib/source/source'
|
||||
|
||||
export class MockSource<T> implements Source<T> {
|
||||
|
||||
constructor (
|
||||
private readonly mockData: Observable<UpdateReal<T>>,
|
||||
) { }
|
||||
|
||||
watch$ (): Observable<UpdateReal<T>> {
|
||||
return this.mockData
|
||||
}
|
||||
|
||||
start (): void { }
|
||||
|
||||
stop (): void { }
|
||||
}
|
||||
90
client/tests/patch-db.test.ts
Normal file
90
client/tests/patch-db.test.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import { expect } from 'chai'
|
||||
import { TestScheduler } from 'rxjs/testing'
|
||||
import { PatchDB } from '../lib/patch-db'
|
||||
import { MockSource } from './mocks/source.mock'
|
||||
import { MockHttp } from './mocks/http.mock'
|
||||
import { PatchOp } from '../lib/patch-db'
|
||||
import { from } from 'rxjs'
|
||||
import { MockBootstrapper } from './mocks/bootstrapper.mock'
|
||||
import { UpdateReal } from '../lib/sequence-store'
|
||||
import { RemoveOperation } from 'fast-json-patch'
|
||||
import 'chai-string'
|
||||
|
||||
// Shape of the document under test; `newKey` is added by a patch in some tests.
type Test = { a: string, b: number[], c: object, newKey?: string }

describe('patch db', function () {
  let scheduler: TestScheduler

  beforeEach(() => {
    // Deep-equality comparison for marble expectations.
    scheduler = new TestScheduler((actual, expected) => {
      // console.log('actual', JSON.stringify(actual))
      // console.log('expected', JSON.stringify(expected))
      expect(actual).eql(expected)
    })
  })

  // Three dumps arrive with ids 1, 3, 2 (deliberately out of order): the
  // late id-2 dump should be ignored, so only two store emissions occur.
  it('dumps', () => {
    scheduler.run(({ expectObservable, cold }) => {
      const initialData: Test = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
      const bootstrapper = new MockBootstrapper(0, initialData)
      const http = new MockHttp( { getSequences: [], getDump: { id: 0, value: { }, expireId: null } } )
      const updates = {
        a: { id: 1, value: { a: 'value1', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }, expireId: null },
        b: { id: 3, value: { a: 'value3', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }, expireId: null },
        c: { id: 2, value: { a: 'value2', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }, expireId: null }, // ooo for fun
      }
      // marble string is 'abc' — one update per frame
      const source = new MockSource<Test>(
        cold(Object.keys(updates).join(''), updates),
      )

      PatchDB.init({ sources: [source], http, bootstrapper }).then(pdb => {
        pdb.sync$().subscribe()
        expectObservable(pdb.store.watch$()).toBe('ab-', { a: updates.a.value, b: updates.b.value })
      })
    })
  })

  // A dump (id 1) followed by a contiguous patch (id 2) that adds a key.
  it('replaces + adds', () => {
    scheduler.run( ({ expectObservable, cold }) => {
      const initialData: Test = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
      const finalStore: Test = { a: 'value1', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 }, newKey: 'newValue' }
      const bootstrapper = new MockBootstrapper(0, initialData )
      const http = new MockHttp({ getSequences: [], getDump: { id: 0, value: { }, expireId: null } } )
      const updates = {
        a: { id: 1, value: { a: 'value1', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }, expireId: null },
        b: { id: 2, patch: [{ op: PatchOp.ADD, value: 'newValue', path: '/newKey' }], expireId: null},
      }
      const source = new MockSource<Test>(
        cold(Object.keys(updates).join(''), updates),
      )

      PatchDB.init({ sources: [source], http, bootstrapper }).then(pdb => {
        pdb.sync$().subscribe()
        expectObservable(pdb.store.watch$()).toBe('ab', { a: updates.a.value, b: finalStore })
      })
    })
  })

  // Revision id 2 removes '/newKey', which does not exist — the patch fails,
  // so PatchDB should recover by fetching the dump (id 2) over MockHttp.
  // Passing condition: the store emits twice (dump 1, then recovery dump).
  it('gets db dump with invalid patch', done => {
    const initialData: Test = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const finalStore: Test = { a: 'value1', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 }, newKey: 'newValue' }
    const bootstrapper = new MockBootstrapper(0, initialData)
    const http = new MockHttp({ getSequences: [], getDump: { id: 2, value: finalStore, expireId: null } })
    const updates: UpdateReal<any>[] = [
      { id: 1, value: { a: 'value1', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }, expireId: null },
      { id: 2, patch: [{ op: PatchOp.REMOVE, path: '/newKey' } as RemoveOperation], expireId: null},
    ]
    const source = new MockSource<Test>(
      from(updates),
    )

    PatchDB.init({ sources: [source], http, bootstrapper }).then(pdb => {
      let counter = 0
      pdb.store.watch$().subscribe(i => {
        counter ++
        if (counter === 2) done()
      })
      pdb.sync$().subscribe()
    })
  })
})
|
||||
195
client/tests/seq.test.ts
Normal file
195
client/tests/seq.test.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import { expect } from 'chai'
|
||||
import { PatchOp } from '../lib/patch-db'
|
||||
import { TestScheduler } from 'rxjs/testing'
|
||||
import { Result, SequenceStore } from '../lib/sequence-store'
|
||||
import { concatMap, map } from 'rxjs/operators'
|
||||
import { Store } from '../lib/store'
|
||||
import { RemoveOperation } from 'fast-json-patch'
|
||||
import 'chai-string'
|
||||
|
||||
type TestStore = { a: string, b: number[], c?: { [key: string]: number } }
|
||||
describe('sequence store', function () {
|
||||
let scheduler: TestScheduler
|
||||
beforeEach(() => {
|
||||
scheduler = new TestScheduler((actual, expected) => {
|
||||
// console.log('actual', JSON.stringify(actual))
|
||||
// console.log('expected', JSON.stringify(expected))
|
||||
expect(actual).eql(expected)
|
||||
})
|
||||
})
|
||||
|
||||
it('dumps', done => {
|
||||
const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
|
||||
const finalStore: TestStore = { a: 'valueX', b: [0], c: { g: 10 } }
|
||||
const store = new Store(initialStore)
|
||||
const toTest = new SequenceStore(store, 0)
|
||||
|
||||
toTest.update$({ id: 5, value: finalStore, expireId: null }).subscribe(() => {
|
||||
expect(toTest.store.peek).eql(finalStore)
|
||||
expect(toTest.sequence).eql(5)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
// The tests below exercise SequenceStore.update$ ordering semantics.
// From the visible assertions: updates carrying an `id` are revisions ordered by
// sequence number; updates carrying a `value` are full dumps; `expireId` pairs a
// real update with a previously applied temp patch.
// (The enclosing describe(...) block begins before this excerpt.)

// A dump establishes sequence N; any later update with id <= N is discarded.
it('ignores dump for id too low', done => {
  const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
  const finalStore: TestStore = { a: 'valueX', b: [0], c: { g: 10 } }
  const store = new Store(initialStore)
  const toTest = new SequenceStore(store, 0)

  // Dump to id 5 first, then attempt a stale dump with id 4 — the second must be a no-op.
  toTest.update$({ id: 5, value: finalStore, expireId: null }).pipe(concatMap(() =>
    toTest.update$({ id: 4, value: initialStore, expireId: null }),
  )).subscribe(() => {
    expect(toTest.store.peek).eql(finalStore)
    expect(toTest.sequence).eql(5)
    done()
  })
})

// A contiguous revision (id === sequence + 1) is applied immediately.
it('revises', done => {
  const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
  const finalStore: TestStore = { a: 'value', b: [1, 2, 3], c: { g: 10 } }
  const store = new Store(initialStore)
  const toTest = new SequenceStore(store, 0)

  toTest.update$({ id: 1, patch: [{ op: PatchOp.REPLACE, value: finalStore.c, path: '/c' }], expireId: null }).subscribe(() => {
    expect(toTest.store.peek).eql(finalStore)
    expect(toTest.sequence).eql(1)
    done()
  })
})

// A non-contiguous revision (id 2 while sequence is 0) is stashed, not applied:
// store contents and sequence are unchanged.
it('saves a revision when not next in line', done => {
  const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
  const finalStore: TestStore = { a: 'value', b: [1, 2, 3], c: { g: 10 } }
  const store = new Store(initialStore)
  const toTest = new SequenceStore(store, 0)

  toTest.update$({ id: 2, patch: [{ op: PatchOp.REPLACE, value: finalStore.c, path: '/c' }], expireId: null }).subscribe(() => {
    expect(toTest.store.peek).eql(initialStore)
    expect(toTest.sequence).eql(0)
    done()
  })
})

// Once the missing revision (id 1) arrives, the stashed one (id 2) is replayed
// on top of it, advancing sequence to 2 in one step.
it('applies saved revisions when contiguous revisions become available', done => {
  const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
  const finalStore: TestStore = { a: 'value', b: [1, 2, 3, 4], c: { g: 10 } }
  const store = new Store(initialStore)
  const toTest = new SequenceStore(store, 0)

  toTest.update$({ id: 2, patch: [{ op: PatchOp.REPLACE, value: finalStore.c, path: '/c' }], expireId: null }).pipe(concatMap(() =>
    toTest.update$({ id: 1, patch: [{ op: PatchOp.ADD, value: 4, path: '/b/-' }], expireId: null }),
  )).subscribe(() => {
    expect(toTest.store.peek).eql(finalStore)
    expect(toTest.sequence).eql(2)
    done()
  })
})

// Same replay behavior when the gap-filler is a full dump (id 1 `value`) rather
// than a patch: the stashed patch (id 2) is applied on top of the dumped state.
it('applies saved revisions when contiguous revisions become available part 2', done => {
  const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
  const finalStore: TestStore = { a: 'value2', b: [0], c: { g: 10 } }
  const store = new Store(initialStore)
  const toTest = new SequenceStore(store, 0)

  toTest.update$({ id: 2, patch: [{ op: PatchOp.REPLACE, value: finalStore.c, path: '/c' }], expireId: null }).pipe(concatMap(() =>
    toTest.update$({ id: 1, value: { a: 'value2', b: [0], c: { d: 1, e: 2, f: 3 } }, expireId: null }),
  )).subscribe(() => {
    expect(toTest.store.peek).eql(finalStore)
    expect(toTest.sequence).eql(2)
    done()
  })
})

// A dump that jumps the sequence forward (id 3 past the stashed id 2) clears
// the stash entirely — viewRevisions() exposes the stash size for inspection.
it('wipes out stashed patches when sequence is force updated', done => {
  const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
  const finalStore: TestStore = { a: 'value2', b: [0], c: { g: 10 } }
  const store = new Store(initialStore)
  const toTest = new SequenceStore(store, 0)

  // patch gets stashed
  expect(toTest.viewRevisions().length).eql(0)

  toTest.update$({ id: 2, patch: [{ op: PatchOp.REPLACE, value: finalStore.c, path: '/c' }], expireId: null }).pipe(
    map(res => expect(res).eql(Result.STASHED) && expect(toTest.viewRevisions().length).eql(1)),
    concatMap(() => toTest.update$({ id: 3, value: finalStore, expireId: null })),
    map(res => expect(res).eql(Result.DUMPED) && expect(toTest.viewRevisions().length).eql(0)),
  ).subscribe(() => done())
})

// Marble test: watch$() emits { sequence, data } pairs — initial state at frame 0,
// then the dumped state when the cold('-b') trigger fires one frame later.
it('emits sequence + state on updates (revisions)', () => {
  scheduler.run( ({ expectObservable, cold }) => {
    const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const finalStore: TestStore = { a: 'value2', b: [0], c: { g: 10 } }

    const store = new Store(initialStore)
    const toTest = new SequenceStore(store, 0)
    const expectedStream = 'ab'

    cold('-b').subscribe(() => {
      toTest.update$({ id: 3, value: finalStore, expireId: null }).subscribe()
    })

    expectObservable(toTest.watch$().pipe(
      map(cache => ({ sequence: cache.sequence, contents: cache.data})),
    )).toBe(expectedStream, {
      a: { sequence: 0, contents: initialStore },
      b: { sequence: 3, contents: finalStore },
    })
  })
})

// Same marble shape as above, but the second emission is produced by a
// contiguous patch (id 1) rather than a dump.
it('emits sequence + state on updates (patch)', () => {
  scheduler.run( ({ expectObservable, cold }) => {
    const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const finalStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3, g: 4 } }

    const store = new Store(initialStore)
    const toTest = new SequenceStore(store, 0)
    const expectedStream = 'ab'

    cold('-b').subscribe(() => {
      toTest.update$({ id: 1, patch: [{ op: PatchOp.ADD, path: '/c/g', value: 4 }], expireId: null }).subscribe()
    })

    expectObservable(toTest.watch$().pipe(
      map(cache => ({ sequence: cache.sequence, contents: cache.data })),
    )).toBe(expectedStream, {
      a: { sequence: 0, contents: initialStore },
      b: { sequence: 1, contents: finalStore },
    })
  })
})

// A patch that cannot apply (ADD under the removed '/c') surfaces as
// Result.ERROR in the emitted result rather than throwing; the store keeps the
// last good state and the sequence does not advance past the failed revision.
it('errors bubble out in results', done => {
  const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
  const intermediaryStore: TestStore = { a: 'value', b: [1, 2, 3] }
  const finalStore: TestStore = { a: 'value', b: [1, 2, 3] }

  const store = new Store(initialStore)
  const toTest = new SequenceStore(store, 0)

  const patch1 = {
    id: 1,
    patch: [{ op: PatchOp.REMOVE, path: '/c' } as RemoveOperation],
    expireId: null,
  }

  // Invalid: '/c' no longer exists after patch1.
  const patch2 = {
    id: 2,
    patch: [{ op: PatchOp.ADD, value: 4, path: '/c/g' }],
    expireId: null,
  }

  toTest.update$(patch1).pipe(
    map(res => expect(res).eql(Result.REVISED) && expect(toTest.store.peek).eql(intermediaryStore)),
    concatMap(() => toTest.update$(patch2)),
  ).subscribe(res => {
    expect(res).eql(Result.ERROR)
    expect(toTest.store.peek).eql(finalStore)
    expect(toTest.sequence).eql(1)
    done()
  })
})
|
||||
})
|
||||
214
client/tests/store.test.ts
Normal file
214
client/tests/store.test.ts
Normal file
@@ -0,0 +1,214 @@
|
||||
import { expect } from 'chai'
|
||||
import { PatchOp } from '../lib/patch-db'
|
||||
import { TestScheduler } from 'rxjs/testing'
|
||||
import { Store } from '../lib/store'
|
||||
import { tap } from 'rxjs/operators'
|
||||
import { AddOperation, RemoveOperation, ReplaceOperation } from 'fast-json-patch'
|
||||
import 'chai-string'
|
||||
|
||||
// Test suite for the reactive Store: JSON-patch application
// (fast-json-patch operations) and key-path observation via watch$.
describe('rx store', function () {
  let scheduler: TestScheduler
  beforeEach(() => {
    // Fresh TestScheduler per test; deep-equality comparison via chai's eql.
    scheduler = new TestScheduler((actual, expected) => {
      // console.log('actual', JSON.stringify(actual))
      // console.log('expected', JSON.stringify(expected))
      expect(actual).eql(expected)
    })
  })

  // applyPatchDocument returns both the pre-patch and post-patch documents.
  it('returns old and new store state', () => {
    const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const expectedFinalStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 }, newKey: 'newValue', newKey2: 'newValue2', newKey3: 'newValue3' }
    const toTest = new Store(initialStore)
    const add: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue', path: '/newKey' }
    const add2: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue2', path: '/newKey2' }
    const add3: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue3', path: '/newKey3' }

    const { oldDocument, newDocument} = toTest.applyPatchDocument([add, add2, add3])
    expect(oldDocument).eql(initialStore)
    expect(newDocument).eql(expectedFinalStore)
  })

  // Each applyPatchDocument call (possibly batching several ops) produces one
  // emission on watch$ — two calls yield frames b and c after the initial a.
  it('adds', () => {
    scheduler.run( ({ expectObservable, cold }) => {
      const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
      const expectedIntermediateStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 }, newKey: 'newValue', newKey2: 'newValue2' }
      const expectedFinalStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 }, newKey: 'newValue', newKey2: 'newValue2', newKey3: 'newValue3' }
      const toTest = new Store(initialStore)
      const add: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue', path: '/newKey' }
      const add2: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue2', path: '/newKey2' }
      const add3: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue3', path: '/newKey3' }
      const expectedStream = 'abc'

      cold('-bc', { b: [add, add2], c: [add3] }).subscribe(i => toTest.applyPatchDocument(i))
      expectObservable(toTest.watch$()).toBe(expectedStream, { a: initialStore, b: expectedIntermediateStore, c: expectedFinalStore })
    })
  })

  // Mixed add/replace/remove ops in a single batch yield one combined emission.
  it('adds + revises + removes', () => {
    scheduler.run( ({ expectObservable, cold }) => {
      const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
      const expectedFinalStore = { a: 'value', b: [1, 2, 3], newKey: 'newValue', newKey2: 'newValue3' }
      const toTest = new Store(initialStore)
      const add: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue', path: '/newKey' }
      const add2: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue2', path: '/newKey2' }
      const revise: ReplaceOperation<string> = { op: PatchOp.REPLACE, value: 'newValue3', path: '/newKey2' }
      const remove: RemoveOperation = { op: PatchOp.REMOVE, path: '/c' }
      const expectedStream = 'ab'

      cold('-b').subscribe(_ => toTest.applyPatchDocument([add, add2, revise, remove]))
      expectObservable(toTest.watch$()).toBe(expectedStream, { a: initialStore, b: expectedFinalStore })
    })
  })

  // Two synchronous applyPatchDocument calls are observed in order:
  // initial -> add applied -> add reverted.
  it('serializes', done => {
    const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const intermediaryStore = { a: 'value', b: [1, 2, 3], newKey: 'newValue', c: { d: 1, e: 2, f: 3 } }
    const toTest = new Store(initialStore)

    const add: AddOperation<string> = { op: PatchOp.ADD, value: 'newValue', path: '/newKey' }
    const unAdd: RemoveOperation = { op: PatchOp.REMOVE, path: '/newKey' }

    let i = 0
    toTest.watch$().subscribe(t => {
      if (i === 0) { expect(t).eql(initialStore) }
      if (i === 1) { expect(t).eql(intermediaryStore) }
      if (i === 2) { expect(t).eql(initialStore); done() }
      i += 1
    })
    toTest.applyPatchDocument([add])
    toTest.applyPatchDocument([unAdd])
  })

  // A batch containing an invalid op throws and leaves the store untouched —
  // i.e. the valid leading op is NOT partially applied.
  it('doesnt apply invalid patches', done => {
    const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const toTest = new Store(initialStore)

    const removeValid: RemoveOperation = { op: PatchOp.REMOVE, path: '/b' }
    const removeInvalid: RemoveOperation = { op: PatchOp.REMOVE, path: '/newKey' }
    try {
      toTest.applyPatchDocument([removeValid, removeInvalid])
      // NOTE(review): deliberate always-failing assertion — acts as fail() if
      // applyPatchDocument does not throw.
      expect(true).eql('We expected an error here')
    } catch (e) {
      toTest.watch$().subscribe(t => {
        expect(t).eql(initialStore)
        done()
      })
    }
  })

  // Watching a nested path whose parent is removed emits undefined rather than erroring.
  it('emits undefined when key disappears', done => {
    const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const store = new Store(initialStore)

    const remove: RemoveOperation = { op: PatchOp.REMOVE, path: '/c' }
    // counter is incremented by tap BEFORE next runs, so it is 1-based inside next.
    let counter = 0
    store.watch$('c', 'd').pipe(tap(() => counter++)).subscribe({
      next: i => {
        if (counter === 1) expect(i).eql(initialStore.c.d)
        if (counter === 2) expect(i).eql(undefined)
        if (counter === 2) done()
      },
    })
    store.applyPatchDocument([remove])
  })

  // After the watched key is removed and re-added, the same subscription
  // resumes emitting the new value.
  it('when key returns, sub continues', done => {
    const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const store = new Store(initialStore)

    const remove: RemoveOperation = { op: PatchOp.REMOVE, path: '/c' }
    const reAdd: AddOperation<{ d: number }> = { op: PatchOp.ADD, path: '/c', value: { d: 1 } }
    let counter = 0
    store.watch$('c', 'd').pipe(tap(() => counter++)).subscribe({
      next: i => {
        if (counter === 1) expect(i).eql(initialStore.c.d)
        if (counter === 2) expect(i).eql(undefined)
        if (counter === 3) expect(i).eql(reAdd.value.d)
        if (counter === 3) done()
      },
    })
    store.applyPatchDocument([remove])
    store.applyPatchDocument([reAdd])
  })

  // watch$('b', 1) targets a single array element and emits on its replacement.
  it('watches a single property', () => {
    scheduler.run( ({ expectObservable, cold }) => {
      const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
      const store = new Store(initialStore)
      const toTest = store.watch$('b', 1)

      const revise: ReplaceOperation<number> = { op: PatchOp.REPLACE, value: 4, path: '/b/1' }

      const expectedStream = 'ab'

      cold('-b').subscribe(_ => store.applyPatchDocument([revise]))
      expectObservable(toTest).toBe(expectedStream, { a: initialStore.b[1], b: revise.value })
    })
  })

  // A patch to /b/1 must NOT re-emit on a watcher of /b/0 (distinct paths).
  it('property only emits if it is updated', () => {
    scheduler.run( ({ expectObservable, cold }) => {
      const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
      const store = new Store(initialStore)
      const toTest = store.watch$('b', 0)

      const revise: ReplaceOperation<number> = { op: PatchOp.REPLACE, value: 4, path: '/b/1' }

      const expectedStream = 'a-'

      cold('-b').subscribe(_ => store.applyPatchDocument([revise]))
      expectObservable(toTest).toBe(expectedStream, { a: initialStore.b[0] })
    })
  })

  // Two replaces of the same path in one batch collapse into a single
  // emission carrying the last value.
  it('only does the last updates', () => {
    scheduler.run( ({ expectObservable, cold }) => {
      const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
      const store = new Store(initialStore)
      const toTest = store.watch$('b', 1)

      const revise1: ReplaceOperation<number> = { op: PatchOp.REPLACE, value: 4, path: '/b/1' }
      const revise2: ReplaceOperation<number> = { op: PatchOp.REPLACE, value: 5, path: '/b/1' }

      const expectedStream = 'ab'

      cold('-b').subscribe(_ => store.applyPatchDocument([revise1, revise2]))
      expectObservable(toTest).toBe(expectedStream, { a: initialStore.b[1], b: revise2.value })
    })
  })

  // The same two replaces applied in SEPARATE batches emit once each.
  it('emits multiple updates', () => {
    scheduler.run( ({ expectObservable, cold }) => {
      const initialStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
      const store = new Store(initialStore)
      const toTest = store.watch$('b', 1)

      const revise1: ReplaceOperation<number> = { op: PatchOp.REPLACE, value: 4, path: '/b/1' }
      const revise2: ReplaceOperation<number> = { op: PatchOp.REPLACE, value: 5, path: '/b/1' }

      const expectedStream = 'abc'

      cold('-bc', { b: revise1, c: revise2 }).subscribe(i => {
        store.applyPatchDocument([i])
      })
      expectObservable(toTest).toBe(expectedStream, { a: initialStore.b[1], b: revise1.value, c: revise2.value})
    })
  })

  // Smoke test against a large JSON fixture to exercise patching on a big document.
  it('does a BIG store', () => {
    scheduler.run( ({ expectObservable, cold }) => {
      const fatty = require('./mocks/mock-data.json')
      const store = new Store(fatty)
      const toTest = store.watch$('kind')

      const revise: ReplaceOperation<string> = { op: PatchOp.REPLACE, value: 'testing', path: '/kind' }
      const expectedStream = 'ab'

      cold('-b', { b: revise }).subscribe(i => {
        store.applyPatchDocument([i])
      })
      expectObservable(toTest).toBe(expectedStream, { a: fatty['kind'], b: revise.value })
    })
  })
})
|
||||
158
client/tests/temp-seq.test.ts
Normal file
158
client/tests/temp-seq.test.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import { expect } from 'chai'
|
||||
import { PatchOp } from '../lib/patch-db'
|
||||
import { TestScheduler } from 'rxjs/testing'
|
||||
import { Result, SequenceStore } from '../lib/sequence-store'
|
||||
import { concatMap, map } from 'rxjs/operators'
|
||||
import { Store } from '../lib/store'
|
||||
import { RemoveOperation } from 'fast-json-patch'
|
||||
import 'chai-string'
|
||||
|
||||
// Shape used by all tests in this suite; `c` is optional so REMOVE '/c' is representable.
type TestStore = { a: string, b: number[], c?: { [key: string]: number } }
// Tests for "temp" (optimistic) patches: updates carrying `expiredBy` apply
// without advancing the sequence, and are rolled back when a real update
// arrives whose `expireId` matches.
describe('sequence store temp functionality', function () {
  let scheduler: TestScheduler
  beforeEach(() => {
    scheduler = new TestScheduler( (actual, expected) => {
      // console.log('actual', JSON.stringify(actual))
      // console.log('expected', JSON.stringify(expected))
      expect(actual).eql(expected)
    })
  })

  // A temp patch changes the visible store but leaves sequence at 0.
  it('applies a temp patch', done => {
    const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const finalStore: TestStore = { a: 'value', b: [1, 2, 3], c: { g: 10 } }

    const store = new Store(initialStore)
    const toTest = new SequenceStore(store, 0)

    toTest.update$({
      patch: [{ op: PatchOp.REPLACE, value: finalStore.c, path: '/c' }],
      expiredBy: 'expireMe',
    }).subscribe(() => {
      expect(toTest.store.peek).eql(finalStore)
      expect(toTest.sequence).eql(0)
      done()
    })
  })

  // Multiple temp patches stack; sequence still stays at 0.
  it('applies multiple temp patches', done => {
    const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const finalStore: TestStore = { a: 'value', b: [0], c: { g: 10 } }

    const store = new Store(initialStore)
    const toTest = new SequenceStore(store, 0)

    const tempPatch1 = {
      patch: [{ op: PatchOp.REPLACE, value: finalStore.c, path: '/c' }],
      expiredBy: 'expireMe1',
    }

    const tempPatch2 = {
      patch: [{ op: PatchOp.REPLACE, value: finalStore.b, path: '/b' }],
      expiredBy: 'expireMe2',
    }

    toTest.update$(tempPatch1).pipe(concatMap(() =>
      toTest.update$(tempPatch2),
    )).subscribe(() => {
      expect(toTest.store.peek).eql(finalStore)
      expect(toTest.sequence).eql(0)
      done()
    })
  })

  // A real update whose expireId matches a temp patch rolls that temp patch
  // back (c returns to its original value) while applying the real change (b).
  it('expires a temp patch', done => {
    const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const intermediaryStore: TestStore = { a: 'value', b: [1, 2, 3], c: { g: 10 } }
    const finalStore: TestStore = { a: 'value', b: [0], c: { d: 1, e: 2, f: 3 } }

    const store = new Store(initialStore)
    const toTest = new SequenceStore(store, 0)

    const tempPatch = {
      patch: [{ op: PatchOp.REPLACE, value: intermediaryStore.c, path: '/c' }],
      expiredBy: 'expireMe',
    }

    const expirePatch = {
      id: 1,
      patch: [{ op: PatchOp.REPLACE, value: finalStore.b, path: '/b' }],
      expireId: 'expireMe',
    }

    toTest.update$(tempPatch).pipe(
      map(() => expect(toTest.store.peek).eql(intermediaryStore)),
      concatMap(() => toTest.update$(expirePatch)),
    ).subscribe(() => {
      expect(toTest.store.peek).eql(finalStore)
      expect(toTest.sequence).eql(1)
      done()
    })
  })

  // Expiring the FIRST temp patch must not disturb a second temp patch
  // stacked on top of it: only 'expireMe' ('/c') is rolled back; '/b' keeps
  // its temp value and '/a' takes the real update.
  it('expires a temp patch beneath a second temp patch', done => {
    const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const intermediaryStore: TestStore = { a: 'value', b: [0], c: { g: 10 } }
    const finalStore: TestStore = { a: 'valueX', b: [0], c: { d: 1, e: 2, f: 3 } }

    const store = new Store(initialStore)
    const toTest = new SequenceStore(store, 0)

    const tempPatch = {
      patch: [{ op: PatchOp.REPLACE, value: intermediaryStore.c, path: '/c' }],
      expiredBy: 'expireMe',
    }

    const tempPatch2 = {
      patch: [{ op: PatchOp.REPLACE, value: intermediaryStore.b, path: '/b' }],
      expiredBy: 'expireMe2',
    }

    const expirePatch = {
      id: 1,
      patch: [{ op: PatchOp.REPLACE, value: finalStore.a, path: '/a' }],
      expireId: 'expireMe',
    }

    toTest.update$(tempPatch).pipe(
      concatMap(() => toTest.update$(tempPatch2)),
      map(() => expect(toTest.store.peek).eql(intermediaryStore)),
      concatMap(() => toTest.update$(expirePatch)),
    ).subscribe(() => {
      expect(toTest.store.peek).eql(finalStore)
      expect(toTest.sequence).eql(1)
      done()
    })
  })

  // Real (sequenced) updates apply BENEATH temp patches: the real ADD to
  // '/c/g' succeeds against the pre-temp document where '/c' still exists.
  it('real patches are genuinely added beneath', done => {
    const initialStore: TestStore = { a: 'value', b: [1, 2, 3], c: { d: 1, e: 2, f: 3 } }
    const intermediaryStore: TestStore = { a: 'value', b: [1, 2, 3] }
    const finalStore: TestStore = { a: 'value', b: [1, 2, 3] }

    const store = new Store(initialStore)
    const toTest = new SequenceStore(store, 0)

    const tempPatch = {
      patch: [{ op: PatchOp.REMOVE, path: '/c' } as RemoveOperation],
      expiredBy: 'expireMe',
    }

    // this patch would error if the above had been a real patch and not a temp
    const realPatch = {
      id: 1,
      patch: [{ op: PatchOp.ADD, value: 4, path: '/c/g' }],
      expireId: null,
    }

    toTest.update$(tempPatch).pipe(
      map(res => expect(res).eql(Result.TEMP) && expect(toTest.store.peek).eql(intermediaryStore)),
      concatMap(() => toTest.update$(realPatch)),
    ).subscribe(res => {
      expect(res).eql(Result.REVISED)
      expect(toTest.store.peek).eql(finalStore)
      expect(toTest.sequence).eql(1)
      done()
    })
  })
})
|
||||
69
client/tsconfig.json
Normal file
69
client/tsconfig.json
Normal file
@@ -0,0 +1,69 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
/* Basic Options */
|
||||
// "incremental": true, /* Enable incremental compilation */
|
||||
"target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
|
||||
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
|
||||
// "lib": [], /* Specify library files to be included in the compilation. */
|
||||
// "allowJs": true, /* Allow javascript files to be compiled. */
|
||||
// "checkJs": true, /* Report errors in .js files. */
|
||||
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||
// "declaration": true, /* Generates corresponding '.d.ts' file. */
|
||||
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
|
||||
// "sourceMap": true, /* Generates corresponding '.map' file. */
|
||||
// "outFile": "./", /* Concatenate and emit output to single file. */
|
||||
// "outDir": "./", /* Redirect output structure to the directory. */
|
||||
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||
// "composite": true, /* Enable project compilation */
|
||||
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
|
||||
// "removeComments": true, /* Do not emit comments to output. */
|
||||
// "noEmit": true, /* Do not emit outputs. */
|
||||
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
|
||||
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
|
||||
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
|
||||
|
||||
/* Strict Type-Checking Options */
|
||||
"strict": true, /* Enable all strict type-checking options. */
|
||||
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||
// "strictNullChecks": true, /* Enable strict null checks. */
|
||||
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
|
||||
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
|
||||
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
|
||||
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
|
||||
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
|
||||
|
||||
"declaration": true,
|
||||
"outDir": "./dist",
|
||||
|
||||
/* Additional Checks */
|
||||
// "noUnusedLocals": true, /* Report errors on unused locals. */
|
||||
// "noUnusedParameters": true, /* Report errors on unused parameters. */
|
||||
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
|
||||
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
|
||||
|
||||
/* Module Resolution Options */
|
||||
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
|
||||
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
|
||||
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||
// "types": [], /* Type declaration files to be included in compilation. */
|
||||
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
||||
|
||||
/* Source Map Options */
|
||||
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
|
||||
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
|
||||
|
||||
/* Experimental Options */
|
||||
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
|
||||
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
|
||||
|
||||
/* Advanced Options */
|
||||
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
|
||||
}
|
||||
}
|
||||
47
client/tslint.json
Normal file
47
client/tslint.json
Normal file
@@ -0,0 +1,47 @@
|
||||
{
|
||||
"rules": {
|
||||
"no-unused-variable": true,
|
||||
"no-unused-expression": true,
|
||||
"space-before-function-paren": true,
|
||||
"semicolon": [
|
||||
true,
|
||||
"never"
|
||||
],
|
||||
"no-trailing-whitespace": true,
|
||||
"indent": [
|
||||
true,
|
||||
"spaces",
|
||||
2
|
||||
],
|
||||
"whitespace": [
|
||||
true,
|
||||
"check-branch",
|
||||
"check-decl",
|
||||
"check-module",
|
||||
"check-operator",
|
||||
"check-separator",
|
||||
"check-rest-spread",
|
||||
"check-type",
|
||||
"check-typecast",
|
||||
"check-type-operator",
|
||||
"check-preblock",
|
||||
"check-postbrace"
|
||||
],
|
||||
"trailing-comma": [
|
||||
true,
|
||||
{
|
||||
"multiline": {
|
||||
"objects": "always",
|
||||
"arrays": "always",
|
||||
"functions": "always",
|
||||
"typeLiterals": "never"
|
||||
},
|
||||
"singleline": "never"
|
||||
}
|
||||
],
|
||||
"quotemark": [
|
||||
true,
|
||||
"single"
|
||||
]
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user