Merge pull request #471 from streamich/debug-file
Debug file
streamich authored Dec 5, 2023
2 parents c24cea3 + 653dd80 commit fa08c52
Showing 11 changed files with 476 additions and 62 deletions.
13 changes: 11 additions & 2 deletions src/json-crdt-patch/clock/clock.ts
@@ -196,10 +196,10 @@ export class ClockVector extends LogicalClock implements IClockVector {
}

/**
* Returns a human-readable string representation of the vector clock.
* Returns a human-readable string representation of the clock vector.
*
* @param tab String to use for indentation.
* @returns Human-readable string representation of the vector clock.
* @returns Human-readable string representation of the clock vector.
*/
public toString(tab: string = ''): string {
const last = this.peers.size;
@@ -236,4 +236,13 @@ export class ServerClockVector extends LogicalClock implements IClockVector {
public fork(): ServerClockVector {
return new ServerClockVector(SESSION.SERVER, this.time);
}

/**
* Returns a human-readable string representation of the clock vector.
*
* @returns Human-readable string representation of the clock vector.
*/
public toString(): string {
return `clock ${this.sid}.${this.time}`;
}
}
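
For reference, a minimal sketch of what the new ServerClockVector.toString() produces. The constructor signature is taken from fork() above; the import paths and the numeric value of SESSION.SERVER are assumptions.

import {ServerClockVector} from 'json-joy/lib/json-crdt-patch/clock/clock';
import {SESSION} from 'json-joy/lib/json-crdt-patch/constants';

// Hypothetical usage: build a server clock at logical time 42 and print it.
const clock = new ServerClockVector(SESSION.SERVER, 42);
console.log(clock.toString()); // e.g. "clock 1.42" if SESSION.SERVER is 1
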
162 changes: 128 additions & 34 deletions src/json-crdt/file/File.ts
@@ -1,29 +1,101 @@
import {Model} from "../model";
import {PatchLog} from "./PatchLog";
import {FileModelEncoding} from "./constants";
import {Model} from '../model';
import {PatchLog} from './PatchLog';
import {FileModelEncoding} from './constants';
import {Encoder as SidecarEncoder} from '../codec/sidecar/binary/Encoder';
import {Decoder as SidecarDecoder} from '../codec/sidecar/binary/Decoder';
import {Encoder as StructuralEncoderCompact} from '../codec/structural/compact/Encoder';
import {Encoder as StructuralEncoderVerbose} from '../codec/structural/verbose/Encoder';
import {encode as encodeCompact} from '../../json-crdt-patch/codec/compact/encode';
import {encode as encodeVerbose} from '../../json-crdt-patch/codec/verbose/encode';
import type * as types from "./types";
import {Writer} from '../../util/buffers/Writer';
import {CborEncoder} from '../../json-pack/cbor/CborEncoder';
import {JsonEncoder} from '../../json-pack/json/JsonEncoder';
import {printTree} from '../../util/print/printTree';
import {decodeModel, decodeNdjsonComponents, decodePatch, decodeSeqCborComponents} from './util';
import {Patch} from '../../json-crdt-patch';
import type * as types from './types';
import type {Printable} from '../../util/print/types';

export class File {
public static fromModel(model: Model): File {
export class File implements Printable {
public static unserialize(components: types.FileReadSequence): File {
const [view, metadata, model, history, ...frontier] = components;
const modelFormat = metadata[1];
let decodedModel: Model<any> | null = null;
if (model) {
const isSidecar = modelFormat === FileModelEncoding.SidecarBinary;
if (isSidecar) {
const decoder = new SidecarDecoder();
if (!(model instanceof Uint8Array)) throw new Error('NOT_BLOB');
decodedModel = decoder.decode(view, model);
} else {
decodedModel = decodeModel(model);
}
}
let log: PatchLog | null = null;
if (history) {
const [start, patches] = history;
if (start) {
const startModel = decodeModel(start);
log = new PatchLog(startModel);
for (const patch of patches) log.push(decodePatch(patch));
}
}
if (!log) throw new Error('NO_HISTORY');
if (!decodedModel) decodedModel = log.replayToEnd();
if (frontier.length) {
for (const patch of frontier) {
const patchDecoded = decodePatch(patch);
decodedModel.applyPatch(patchDecoded);
log.push(patchDecoded);
}
}
const file = new File(decodedModel, log);
return file;
}

public static fromNdjson(blob: Uint8Array): File {
const components = decodeNdjsonComponents(blob);
return File.unserialize(components as types.FileReadSequence);
}

public static fromSeqCbor(blob: Uint8Array): File {
const components = decodeSeqCborComponents(blob);
return File.unserialize(components as types.FileReadSequence);
}

public static fromModel(model: Model<any>): File {
return new File(model, PatchLog.fromModel(model));
}

constructor(
public readonly model: Model,
public readonly history: PatchLog,
) {}
constructor(public readonly model: Model, public readonly log: PatchLog) {}

public apply(patch: Patch): void {
const id = patch.getId();
if (!id) return;
this.model.applyPatch(patch);
this.log.push(patch);
}

public sync(): () => void {
const {model, log} = this;
const api = model.api;
const autoflushUnsubscribe = api.autoFlush();
const onPatchUnsubscribe = api.onPatch.listen((patch) => {
log.push(patch);
});
const onFlushUnsubscribe = api.onFlush.listen((patch) => {
log.push(patch);
});
return () => {
autoflushUnsubscribe();
onPatchUnsubscribe();
onFlushUnsubscribe();
};
}

public serialize(params: types.FileSerializeParams = {}): types.FileWriteSequence {
const view = this.model.view();
const metadata: types.FileMetadata = [
{},
FileModelEncoding.SidecarBinary,
];
if (params.noView && params.model === 'sidecar') throw new Error('SIDECAR_MODEL_WITHOUT_VIEW');
const metadata: types.FileMetadata = [{}, FileModelEncoding.Auto];
let model: Uint8Array | unknown | null = null;
const modelFormat = params.model ?? 'sidecar';
switch (modelFormat) {
@@ -35,58 +107,80 @@ export class File {
break;
}
case 'binary': {
metadata[1] = FileModelEncoding.StructuralBinary;
model = this.model.toBinary();
break;
}
case 'compact': {
metadata[1] = FileModelEncoding.StructuralCompact;
model = new StructuralEncoderCompact().encode(this.model);
break;
}
case 'verbose': {
metadata[1] = FileModelEncoding.StructuralVerbose;
model = new StructuralEncoderVerbose().encode(this.model);
break;
}
case 'none': {
model = null;
break;
}
default:
throw new Error(`Invalid model format: ${modelFormat}`);
}
const history: types.FileWriteSequenceHistory = [
null,
[],
];
const history: types.FileWriteSequenceHistory = [null, []];
const patchFormat = params.history ?? 'binary';
switch (patchFormat) {
case 'binary': {
history[0] = this.history.start.toBinary();
this.history.patches.forEach(({v}) => {
history[0] = this.log.start.toBinary();
this.log.patches.forEach(({v}) => {
history[1].push(v.toBinary());
});
break;
}
case 'compact': {
history[0] = new StructuralEncoderCompact().encode(this.history.start);
this.history.patches.forEach(({v}) => {
history[0] = new StructuralEncoderCompact().encode(this.log.start);
this.log.patches.forEach(({v}) => {
history[1].push(encodeCompact(v));
});
break;
}
case 'verbose': {
history[0] = new StructuralEncoderVerbose().encode(this.history.start);
this.history.patches.forEach(({v}) => {
history[0] = new StructuralEncoderVerbose().encode(this.log.start);
this.log.patches.forEach(({v}) => {
history[1].push(encodeVerbose(v));
});
break;
}
case 'none': {
break;
}
default:
throw new Error(`Invalid history format: ${patchFormat}`);
}
return [
view,
metadata,
model,
history,
];
return [params.noView ? null : this.model.view(), metadata, model, history];
}

public toBinary(params: types.FileEncodingParams): Uint8Array {
const sequence = this.serialize(params);
const writer = new Writer(16 * 1024);
switch (params.format) {
case 'ndjson': {
const json = new JsonEncoder(writer);
for (const component of sequence) {
json.writeAny(component);
json.writer.u8('\n'.charCodeAt(0));
}
return json.writer.flush();
}
case 'seq.cbor': {
const cbor = new CborEncoder(writer);
for (const component of sequence) cbor.writeAny(component);
return cbor.writer.flush();
}
}
}

// ---------------------------------------------------------------- Printable

public toString(tab?: string) {
return `file` + printTree(tab, [(tab) => this.model.toString(tab), () => '', (tab) => this.log.toString(tab)]);
}
}
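
A rough usage sketch of the File API added above. The method names come from the diff; the import paths and the exact shape of FileEncodingParams (format plus the serialize options) are assumptions.

import {Model} from 'json-joy/lib/json-crdt';
import {File} from 'json-joy/lib/json-crdt/file/File';

// Create a model, wrap it in a File, and keep the log in sync with edits.
const model = Model.withLogicalClock();
model.api.root({hello: 'world'});
const file = File.fromModel(model);
const stop = file.sync(); // routes flushed patches into file.log

model.api.obj([]).set({hello: 'universe'});
model.api.flush(); // flush explicitly so the edit lands in the log right away

// Encode everything as an NDJSON blob and decode it back; 'seq.cbor' works the same way.
const blob = file.toBinary({format: 'ndjson', model: 'compact', history: 'compact'});
const copy = File.fromNdjson(blob);

stop(); // detach the sync() listeners
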
48 changes: 42 additions & 6 deletions src/json-crdt/file/PatchLog.ts
@@ -1,9 +1,12 @@
import {ITimestampStruct, Patch, ServerClockVector, compare} from "../../json-crdt-patch";
import {AvlMap} from "../../util/trees/avl/AvlMap";
import {Model} from "../model";
import {ITimestampStruct, Patch, compare} from '../../json-crdt-patch';
import {printTree} from '../../util/print/printTree';
import {AvlMap} from '../../util/trees/avl/AvlMap';
import {Model} from '../model';
import type {Printable} from '../../util/print/types';
import {first, next} from '../../util/trees/util';

export class PatchLog {
public static fromModel (model: Model): PatchLog {
export class PatchLog implements Printable {
public static fromModel(model: Model<any>): PatchLog {
const start = new Model(model.clock.clone());
const log = new PatchLog(start);
if (model.api.builder.patch.ops.length) {
@@ -15,11 +18,44 @@ export class PatchLog {

public readonly patches = new AvlMap<ITimestampStruct, Patch>(compare);

constructor (public readonly start: Model) {}
constructor(public readonly start: Model) {}

public push(patch: Patch): void {
const id = patch.getId();
if (!id) return;
this.patches.set(id, patch);
}

public replayToEnd(): Model {
const clone = this.start.clone();
for (let node = first(this.patches.root); node; node = next(node)) clone.applyPatch(node.v);
return clone;
}

public replayTo(ts: ITimestampStruct): Model {
const clone = this.start.clone();
for (let node = first(this.patches.root); node && compare(ts, node.k) >= 0; node = next(node))
clone.applyPatch(node.v);
return clone;
}

// ---------------------------------------------------------------- Printable

public toString(tab?: string) {
const log: Patch[] = [];
this.patches.forEach(({v}) => log.push(v));
return (
`log` +
printTree(tab, [
(tab) => this.start.toString(tab),
() => '',
(tab) =>
'history' +
printTree(
tab,
log.map((patch, i) => (tab) => `${i}: ${patch.toString(tab)}`),
),
])
);
}
}
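
A short sketch of how the new replay helpers might be used. Only push(), replayToEnd(), and replayTo() come from the diff; the surrounding Model calls and import paths are assumptions.

import {Model} from 'json-joy/lib/json-crdt';
import {PatchLog} from 'json-joy/lib/json-crdt/file/PatchLog';

const model = Model.withLogicalClock();
const log = PatchLog.fromModel(model);

// Record two edits in the log.
model.api.root({count: 0});
const p1 = model.api.flush();
log.push(p1);

model.api.obj([]).set({count: 1});
const p2 = model.api.flush();
log.push(p2);

// Rebuild the document from the start model plus every patch...
const latest = log.replayToEnd();
// ...or only up to (and including) the patch with a given ID.
const atP1 = log.replayTo(p1.getId()!);
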