diff --git a/config/karma.config.js b/config/karma.config.js index 7426e9c0..19ded567 100644 --- a/config/karma.config.js +++ b/config/karma.config.js @@ -63,6 +63,9 @@ module.exports = function (config) { test: /\.ts$/ } ] + }, + externals: { + fs: "fs" } }, webpackMiddleware: { diff --git a/config/nycrc.node.json b/config/nycrc.node.json index ec52b271..6a80208e 100644 --- a/config/nycrc.node.json +++ b/config/nycrc.node.json @@ -9,7 +9,8 @@ "exclude": [ "**/*.helper.[jt]s", "**/*.spec.[jt]s", - "**/spec/**/*.[jt]s" + "**/spec/**/*.[jt]s", + "**/lokijs/**/*.js" ], "report-dir": "./coverage/node/" } diff --git a/packages/common/types.ts b/packages/common/types.ts index 9ddc7de4..e8c47b41 100644 --- a/packages/common/types.ts +++ b/packages/common/types.ts @@ -1,10 +1,11 @@ /** * @hidden */ -import { Loki } from "../loki/src/loki"; +import { Loki } from "../loki/src"; +import { Serialization } from "../loki/src/serialization/migration"; export interface StorageAdapter { - loadDatabase(dbname: string): Promise; + loadDatabase(dbname: string): Promise; saveDatabase?(dbname: string, serialization: string): Promise; @@ -25,11 +26,10 @@ export type Doc = T & { }; }; -export interface Dict { +export type Dict = { [index: string]: T; - [index: number]: T; -} +}; diff --git a/packages/fs-storage/spec/node/fs_storage.spec.ts b/packages/fs-storage/spec/node/fs_storage.spec.ts index 177a56e3..6fe9a45d 100644 --- a/packages/fs-storage/spec/node/fs_storage.spec.ts +++ b/packages/fs-storage/spec/node/fs_storage.spec.ts @@ -2,6 +2,9 @@ import { Loki } from "../../../loki/src/loki"; import { FSStorage } from "../../src/fs_storage"; +declare var require: (moduleId: string) => any; +const loki = require("../../../lokijs/lokijs.js"); + describe("testing fs storage", function () { interface Name { @@ -68,4 +71,23 @@ describe("testing fs storage", function () { }); }); }); + + it("from lokijs", (done) => { + const legacyDB = new loki("legacyDB", {persistenceMethod: "fs"}); + const coll = legacyDB.addCollection("myColl"); + coll.insert({name: "Hello World"}); + legacyDB.saveDatabase(() => { + // Load with LokiDB. + const db = new Loki("legacyDB"); + return db.initializePersistence() + .then(() => { + return db.loadDatabase(); + }).then(() => { + expect(db.getCollection("myColl").find()[0].name).toEqual("Hello World"); + return db.deleteDatabase(); + }).then(() => { + done(); + }); + }); + }); }); diff --git a/packages/full-text-search/src/full_text_search.ts b/packages/full-text-search/src/full_text_search.ts index 179b28e2..7cedee41 100644 --- a/packages/full-text-search/src/full_text_search.ts +++ b/packages/full-text-search/src/full_text_search.ts @@ -5,6 +5,7 @@ import { PLUGINS } from "../../common/plugin"; import { Query } from "./query_types"; import { Scorer } from "./scorer"; import { Analyzer } from "./analyzer/analyzer"; +import { Serialization } from "../../loki/src/serialization/migration"; export class FullTextSearch { /// The id field of each document. 
@@ -78,7 +79,7 @@ export class FullTextSearch { return this._idxSearcher.search(query); } - public toJSON(): FullTextSearch.Serialization { + public toJSON(): Serialization.FullTextSearch { let serialized = {id: this._id, ii: {}}; let fieldNames = Object.keys(this._invIdxs); for (let i = 0; i < fieldNames.length; i++) { @@ -88,7 +89,7 @@ export class FullTextSearch { return serialized; } - public static fromJSONObject(serialized: FullTextSearch.Serialization, analyzers: Dict = {}): FullTextSearch { + public static fromJSONObject(serialized: Serialization.FullTextSearch, analyzers: Dict = {}): FullTextSearch { let fts = new FullTextSearch([], serialized.id); let fieldNames = Object.keys(serialized.ii); for (let i = 0; i < fieldNames.length; i++) { @@ -103,9 +104,4 @@ export namespace FullTextSearch { export interface FieldOptions extends InvertedIndex.FieldOptions { field: string; } - - export interface Serialization { - id: string; - ii: Dict; - } } diff --git a/packages/full-text-search/src/inverted_index.ts b/packages/full-text-search/src/inverted_index.ts index ba0f7e74..b47633a9 100644 --- a/packages/full-text-search/src/inverted_index.ts +++ b/packages/full-text-search/src/inverted_index.ts @@ -1,4 +1,5 @@ import { Analyzer, StandardAnalyzer, analyze } from "./analyzer/analyzer"; +import { Serialization } from "../../loki/src/serialization/migration"; /** * Converts a string into an array of code points. @@ -31,7 +32,7 @@ export class InvertedIndex { public docCount: number = 0; public docStore: Map = new Map(); public totalFieldLength: number = 0; - public root: InvertedIndex.Index = new Map(); + public root: InvertedIndex.Index = new Map() as InvertedIndex.Index; private _store: boolean; private _optimizeChanges: boolean; @@ -219,11 +220,11 @@ export class InvertedIndex { * Serialize the inverted index. * @returns {{docStore: *, _fields: *, index: *}} */ - public toJSON(): InvertedIndex.Serialization { + public toJSON(): Serialization.FullTextSearch.InvertedIndex { if (this._store) { return { - _store: true, - _optimizeChanges: this._optimizeChanges, + store: true, + optimizeChanges: this._optimizeChanges, docCount: this.docCount, docStore: [...this.docStore], totalFieldLength: this.totalFieldLength, @@ -231,8 +232,8 @@ export class InvertedIndex { }; } return { - _store: false, - _optimizeChanges: this._optimizeChanges, + store: false, + optimizeChanges: this._optimizeChanges, }; } @@ -241,14 +242,14 @@ export class InvertedIndex { * @param {{docStore: *, _fields: *, index: *}} serialized - The serialized inverted index. 
* @param {Analyzer} analyzer[undefined] - an analyzer */ - public static fromJSONObject(serialized: InvertedIndex.Serialization, analyzer?: Analyzer): InvertedIndex { + public static fromJSONObject(serialized: Serialization.FullTextSearch.InvertedIndex, analyzer?: Analyzer): InvertedIndex { const invIdx = new InvertedIndex({ - store: serialized._store, - optimizeChanges: serialized._optimizeChanges, + store: serialized.store, + optimizeChanges: serialized.optimizeChanges, analyzer: analyzer }); - if (serialized._store) { + if (serialized.store) { invIdx.docCount = serialized.docCount; invIdx.docStore = new Map(serialized.docStore); invIdx.totalFieldLength = serialized.totalFieldLength; @@ -262,8 +263,8 @@ export class InvertedIndex { return invIdx; } - private static _serializeIndex(idx: InvertedIndex.Index): InvertedIndex.SerializedIndex { - const serialized: InvertedIndex.SerializedIndex = {}; + private static _serializeIndex(idx: InvertedIndex.Index): Serialization.FullTextSearch.Index { + const serialized: Serialization.FullTextSearch.Index = {}; if (idx.dc !== undefined) { serialized.d = {df: idx.df, dc: [...idx.dc]}; } @@ -284,8 +285,8 @@ export class InvertedIndex { return serialized; } - private static _deserializeIndex(serialized: InvertedIndex.SerializedIndex): InvertedIndex.Index { - const idx: InvertedIndex.Index = new Map(); + private static _deserializeIndex(serialized: Serialization.FullTextSearch.Index): InvertedIndex.Index { + const idx: InvertedIndex.Index = new Map() as InvertedIndex.Index; if (serialized.k !== undefined) { for (let i = 0; i < serialized.k.length; i++) { @@ -374,31 +375,6 @@ export namespace InvertedIndex { export type IndexTerm = { index: Index, term: number[] }; - export interface SerializedIndex { - d?: { - df: number; - dc: [DocumentIndex, number][] - }; - k?: number[]; - v?: SerializedIndex[]; - } - - export type Serialization = SpareSerialization | FullSerialization; - - export type SpareSerialization = { - _store: false; - _optimizeChanges: boolean; - }; - - export type FullSerialization = { - _store: true; - _optimizeChanges: boolean; - docCount: number; - docStore: [DocumentIndex, DocStore][]; - totalFieldLength: number; - root: SerializedIndex; - }; - export interface DocStore { fieldLength?: number; indexRef?: Index[]; diff --git a/packages/indexed-storage/spec/web/indexed_storage.spec.ts b/packages/indexed-storage/spec/web/indexed_storage.spec.ts index 79289314..18b5f78f 100644 --- a/packages/indexed-storage/spec/web/indexed_storage.spec.ts +++ b/packages/indexed-storage/spec/web/indexed_storage.spec.ts @@ -2,6 +2,10 @@ import { Loki } from "../../../loki/src/loki"; import { IndexedStorage } from "../../src/indexed_storage"; +declare var require: (moduleId: string) => any; +const loki = require("../../../lokijs/lokijs.js"); +const indexedAdapter = require("../../../lokijs/loki-indexed-adapter.js"); + describe("testing indexed storage", function () { interface Name { @@ -68,4 +72,23 @@ describe("testing indexed storage", function () { }); }); }); + + it("from lokijs", (done) => { + const legacyDB = new loki("legacyDB", {adapter: new indexedAdapter()}); + const coll = legacyDB.addCollection("myColl"); + coll.insert({name: "Hello World"}); + legacyDB.saveDatabase(() => { + // Load with LokiDB. 
+ const db = new Loki("legacyDB"); + return db.initializePersistence() + .then(() => { + return db.loadDatabase(); + }).then(() => { + expect(db.getCollection("myColl").find()[0].name).toEqual("Hello World"); + return db.deleteDatabase(); + }).then(() => { + done(); + }); + }); + }); }); diff --git a/packages/indexed-storage/src/indexed_storage.ts b/packages/indexed-storage/src/indexed_storage.ts index ef1214a8..70f58d36 100644 --- a/packages/indexed-storage/src/indexed_storage.ts +++ b/packages/indexed-storage/src/indexed_storage.ts @@ -67,7 +67,7 @@ export class IndexedStorage implements StorageAdapter { * @param {string} dbname - the name of the database to retrieve. * @returns {Promise} a Promise that resolves after the database was loaded */ - loadDatabase(dbname: string) { + loadDatabase(dbname: string): Promise { const appName = this._appname; const adapter = this; diff --git a/packages/local-storage/spec/web/local_storage.spec.ts b/packages/local-storage/spec/web/local_storage.spec.ts index 7b96dbf9..16711743 100644 --- a/packages/local-storage/spec/web/local_storage.spec.ts +++ b/packages/local-storage/spec/web/local_storage.spec.ts @@ -2,6 +2,9 @@ import { Loki } from "../../../loki/src/loki"; import { LocalStorage } from "../../src/local_storage"; +declare var require: (moduleId: string) => any; +const loki = require("../../../lokijs/lokijs.js"); + describe("testing local storage", function () { interface Name { @@ -68,4 +71,23 @@ describe("testing local storage", function () { }); }); }); + + it("from lokijs", (done) => { + const legacyDB = new loki("legacyDB", {persistenceMethod: "localStorage"}); + const coll = legacyDB.addCollection("myColl"); + coll.insert({name: "Hello World"}); + legacyDB.saveDatabase(() => { + // Load with LokiDB. 
+ const db = new Loki("legacyDB"); + return db.initializePersistence() + .then(() => { + return db.loadDatabase(); + }).then(() => { + expect(db.getCollection("myColl").find()[0].name).toEqual("Hello World"); + return db.deleteDatabase(); + }).then(() => { + done(); + }); + }); + }); }); diff --git a/packages/loki/spec/generic/migration.spec.ts b/packages/loki/spec/generic/migration.spec.ts new file mode 100644 index 00000000..a88e38e7 --- /dev/null +++ b/packages/loki/spec/generic/migration.spec.ts @@ -0,0 +1,278 @@ +import { Loki } from "../../src/loki"; +import { Serialization, mergeRightBiasedWithProxy } from "../../src/serialization/migration"; + +declare var require: (moduleId: string) => any; +const loki = require("../../../lokijs/lokijs.js"); + +describe("load different database versions", function () { + it("mergeRightBiasedWithProxy", () => { + let left = { + version: 1 as 1, + name: "abc", + value: 2, + sub: { + query: [1, 2, 3], + older: { + enabled: false as false + } + }, + oldObj: { + prop1: { + val: true + } + }, + oldProp: 1, + query: { + doit: 1 + }, + filter: [1, 2], + colls: [ + { + id: 1, + trans: [ + { + ix: 1 + } + ], + names: "abc" as "abc" | "def", + }, + { + id: 2, + trans: [ + { + ix: 2 + }, + { + ix: 3 + } + ], + names: "def" as "abc" | "def", + } + ], + callbackOld: () => false, + callback: () => "1" + }; + + let right = { + version: 2 as 2, + name: "def", + value: 3, + sub: { + query: [true, false, false], + newer: { + disabled: false + } + }, + newProp: 2.0 as 2.0, + query: [1, 3, true], + filter: { + one: 2 + }, + colls: left.colls.map(coll => mergeRightBiasedWithProxy(coll, { + name: coll.id + ":id", + trans: coll.trans.map(tran => mergeRightBiasedWithProxy(tran, { + nx: tran.ix + ":ix" + })), + names: (coll.names === "def" ? 
"xyz" : coll.names) as "abc" | "xyz" + })), + callback: () => 1, + callbackNew: () => "2" + }; + + interface Merged { + version: 2; + name: string; + value: number; + sub: { + query: boolean[]; + older: { + enabled: false + } + newer: { + disabled: boolean + } + }; + oldObj: { + prop1: { + val: boolean; + } + }; + oldProp: number; + newProp: 2.0; + query: (boolean | number)[]; + filter: { + one: number + }; + colls: { + id: number; + name: string; + trans: { + ix: number; + nx: string; + }[]; + names: "abc" | "xyz"; + }[]; + callbackOld: () => boolean; + callback: () => number; + callbackNew: () => string; + } + + let merged: Merged = mergeRightBiasedWithProxy(left, right); + expect(merged.version).toEqual(2); + expect(merged.name).toEqual("def"); + expect(merged.value).toEqual(3); + expect(merged.sub.query).toEqual([true, false, false]); + expect(merged.sub.older.enabled).toEqual(false); + expect(merged.sub.newer.disabled).toEqual(false); + expect(merged.oldObj.prop1.val).toEqual(true); + expect(merged.oldProp).toEqual(1); + expect(merged.query).toEqual([1, 3, true]); + expect(merged.newProp).toEqual(2.0); + expect(merged.filter.one).toEqual(2); + + expect(merged.colls.length).toEqual(2); + expect(merged.colls[0].id).toEqual(1); + expect(merged.colls[0].name).toEqual("1:id"); + expect(merged.colls[0].names).toEqual("abc"); + expect(merged.colls[1].id).toEqual(2); + expect(merged.colls[1].name).toEqual("2:id"); + expect(merged.colls[1].names).toEqual("xyz"); + expect(merged.colls[0].trans.length).toEqual(1); + expect(merged.colls[1].trans.length).toEqual(2); + expect(merged.colls[0].trans[0].ix).toEqual(1); + expect(merged.colls[0].trans[0].nx).toEqual("1:ix"); + expect(merged.colls[1].trans[0].ix).toEqual(2); + expect(merged.colls[1].trans[0].nx).toEqual("2:ix"); + expect(merged.colls[1].trans[1].ix).toEqual(3); + expect(merged.colls[1].trans[1].nx).toEqual("3:ix"); + + expect(merged.callbackOld()).toEqual(false); + expect(merged.callback()).toEqual(1); + expect(merged.callbackNew()).toEqual("2"); + }); +}); + +describe("load lokijs", () => { + + interface Data { + a: number; + b: number; + c: number; + d: { + msg: string; + }; + } + + const legacyDB = new loki(); + { + const abc = legacyDB.addCollection("abc", {indices: ["a", "c"], unique: ["b"]}); + abc.insert([ + { + a: 1, b: 2, c: 1, d: { + msg: "hello" + } + }, + { + a: 2, b: 6, c: 2, d: { + msg: "loki" + } + }, + { + a: 3, b: 8, c: 1, d: { + msg: "legacy" + } + }, + ] as Data[]); + const tx = [ + { + type: "find", + value: { + "d.msg": "loki" + } + } + ]; + abc.addTransform("findLoki", tx); + + const txParam = [ + { + type: "limit", + value: "[%lktxp]param" + } + ]; + abc.addTransform("limit", txParam); + + const dyn = abc.addDynamicView("notLoki"); + dyn.applyFind({c: 1}); + dyn.applySimpleSort("a", true); + } + + it("test lokijs", () => { + + const abc = legacyDB.getCollection("abc"); + + let result = abc.chain("findLoki").data(); + expect(result.length).toEqual(1); + expect(result[0].d.msg).toEqual("loki"); + + result = abc.chain("limit", {param: 1}).data(); + expect(result.length).toEqual(1); + + + const dyn = abc.getDynamicView("notLoki"); + result = dyn.data(); + expect(result.length).toEqual(2); + expect(result[0].d.msg).toEqual("legacy"); + expect(result[1].d.msg).toEqual("hello"); + }); + + it("test LokiDB with serialization inflation", () => { + let db = new Loki(); + db.loadJSONObject(legacyDB as Serialization.Serialized, { + migrate: (_1, coll) => { + coll.nestedProperties = [{name: "d.msg", path: ["d", "msg"]}]; + 
return false; + } + }); + + const abc = db.getCollection("abc"); + + let result = abc.chain("findLoki").data(); + expect(result.length).toEqual(1); + expect(result[0].d.msg).toEqual("loki"); + + result = abc.chain("limit", {param: 1}).data(); + expect(result.length).toEqual(1); + + const dyn = abc.getDynamicView("notLoki"); + dyn.applyFind({c: 1}); + dyn.applySimpleSort("a", true); + result = dyn.data(); + expect(result.length).toEqual(2); + expect(result[0].d.msg).toEqual("legacy"); + expect(result[1].d.msg).toEqual("hello"); + }); + + + it("test LokiDB with options inflation", () => { + const db = new Loki(); + + // Transform and dynamic view will not be deserialized. + db.loadJSONObject(legacyDB as Serialization.Serialized, { + migrate: (_1, _2, options) => { + options.nestedProperties = ["d.msg"]; + return true; + } + }); + + const abc = db.getCollection("abc"); + + let result = abc.find({"d.msg": "loki"}); + expect(result.length).toEqual(1); + expect(result[0].d.msg).toEqual("loki"); + + result = abc.chain().find({c: 1}).simplesort("a", true).data(); + expect(result.length).toEqual(2); + expect(result[0].d.msg).toEqual("legacy"); + expect(result[1].d.msg).toEqual("hello"); + }); +}); diff --git a/packages/loki/spec/generic/persistence.spec.ts b/packages/loki/spec/generic/persistence.spec.ts index 57fa85f2..915951c9 100644 --- a/packages/loki/spec/generic/persistence.spec.ts +++ b/packages/loki/spec/generic/persistence.spec.ts @@ -427,7 +427,6 @@ describe("testing adapter functionality", () => { expect(typeof(dbname)).toEqual("string"); expect(dbref.constructor.name).toEqual("Loki"); - expect(dbref["_persistenceAdapter"]).toEqual(null); expect(dbref["_collections"].length).toEqual(2); // these changes should not affect original database dbref["filename"] = "somethingelse"; diff --git a/packages/loki/spec/generic/typed.spec.ts b/packages/loki/spec/generic/typed.spec.ts index c472eaa5..00b4a39f 100644 --- a/packages/loki/spec/generic/typed.spec.ts +++ b/packages/loki/spec/generic/typed.spec.ts @@ -1,6 +1,7 @@ /* global describe, it, expect */ import { Loki } from "../../src/loki"; import { Doc } from "../../../common/types"; +import { Serialization } from "../../src/serialization/migration"; describe("typed", () => { it("works", () => { @@ -17,11 +18,13 @@ describe("typed", () => { } } - const json = { + const json: Serialization.Loki = { "filename": "test.json", - "_collections": [{ + "databaseVersion": 2, + "engineVersion": 2, + "collections": [{ "name": "users", - "_data": [{ + "data": [{ "name": "joe", "meta": { "version": 0, @@ -42,11 +45,24 @@ describe("typed", () => { "binaryIndices": {}, "transactional": false, "maxId": 2, - "_nestedProperties": [] as any[], - }], - "events": {}, - "ENV": "NODEJS", - "fs": {} + "nestedProperties": [] as any[], + "dynamicViews": [], + "uniqueNames": [], + "transforms": {}, + "dirty": false, + "adaptiveBinaryIndices": false, + "asyncListeners": false, + "disableMeta": false, + "disableChangesApi": false, + "disableDeltaChangesApi": false, + "cloneMethod": "deep", + "cloneObjects": false, + "changes": [], + "serializableIndices": false, + "ttl": null, + "ttlInterval": null, + "fullTextSearch": null + }] }; // Loading only using proto: diff --git a/packages/loki/src/clone.ts b/packages/loki/src/clone.ts index 627d57b1..e08e9b38 100644 --- a/packages/loki/src/clone.ts +++ b/packages/loki/src/clone.ts @@ -90,3 +90,4 @@ export function clone(data: T, method: CloneMethod = "parse-stringify"): T { return cloned as any as T; } + diff --git 
a/packages/loki/src/collection.ts b/packages/loki/src/collection.ts index cbcffbf8..ee8dfdb4 100644 --- a/packages/loki/src/collection.ts +++ b/packages/loki/src/collection.ts @@ -8,6 +8,7 @@ import { Doc, Dict } from "../../common/types"; import { FullTextSearch } from "../../full-text-search/src/full_text_search"; import { PLUGINS } from "../../common/plugin"; import { Analyzer } from "../../full-text-search/src/analyzer/analyzer"; +import { Serialization } from "./serialization/migration"; export {CloneMethod} from "./clone"; @@ -155,7 +156,7 @@ export class Collection dV.toJSON()), uniqueNames: Object.keys(this._constraints.unique), transforms: this._transforms as any, binaryIndices: this._binaryIndices as any, - _data: this._data, + data: this._data, idIndex: this._idIndex, maxId: this._maxId, - _dirty: this._dirty, - _nestedProperties: this._nestedProperties, + dirty: this._dirty, + nestedProperties: this._nestedProperties, adaptiveBinaryIndices: this._adaptiveBinaryIndices, transactional: this._transactional, asyncListeners: this._asyncListeners, @@ -325,13 +326,17 @@ export class Collection(obj.name, { + + static fromJSONObject(obj: Serialization.Collection, options?: Collection.DeserializeOptions) { + let coll = new Collection(obj.name, { disableChangesApi: obj.disableChangesApi, disableDeltaChangesApi: obj.disableDeltaChangesApi }); @@ -344,11 +349,11 @@ export class Collection { const now = Date.now(); const toRemove = this.chain().where((member: Doc) => { @@ -2179,13 +2186,14 @@ export namespace Collection { export interface DeserializeOptions { retainDirtyFlags?: boolean; fullTextSearch?: Dict; + migrate?: (databaseVersion: number, coll: Serialization.Collection, options: Collection.Options) => boolean; [collName: string]: any | { proto?: any; inflate?: (src: object, dest?: object) => void }; } export interface BinaryIndex { dirty: boolean; - values: any; + values: number[]; } export interface Change { @@ -2194,29 +2202,6 @@ export namespace Collection { obj: any; } - export interface Serialized { - name: string; - _dynamicViews: DynamicView[]; - _nestedProperties: { name: string, path: string[] }[]; - uniqueNames: string[]; - transforms: Dict; - binaryIndices: Dict; - _data: Doc[]; - idIndex: number[]; - maxId: number; - _dirty: boolean; - adaptiveBinaryIndices: boolean; - transactional: boolean; - asyncListeners: boolean; - disableMeta: boolean; - disableChangesApi: boolean; - disableDeltaChangesApi: boolean; - cloneObjects: boolean; - cloneMethod: CloneMethod; - changes: any; - _fullTextSearch: FullTextSearch; - } - export interface CheckIndexOptions { randomSampling?: boolean; randomSamplingFactor?: number; @@ -2272,7 +2257,7 @@ export namespace Collection { export interface TTL { age: number; - ttlInterval: number; + interval: number; daemon: any; // setInterval Timer } } diff --git a/packages/loki/src/dynamic_view.ts b/packages/loki/src/dynamic_view.ts index f4a0a502..8b680834 100644 --- a/packages/loki/src/dynamic_view.ts +++ b/packages/loki/src/dynamic_view.ts @@ -3,6 +3,7 @@ import { ResultSet } from "./result_set"; import { Collection } from "./collection"; import { Doc } from "../../common/types"; import { Scorer } from "../../full-text-search/src/scorer"; +import { Serialization } from "./serialization/migration"; /** * DynamicView class is a versatile 'live' view class which can have filters and sorts applied. 
@@ -150,32 +151,33 @@ export class DynamicView; - _filterPipeline: Filter[]; - _sortCriteria: (string | [string, boolean])[]; - _sortCriteriaSimple: { field: string, options: boolean | ResultSet.SimpleSortOptions }; - _sortByScoring: boolean; - _sortDirty: boolean; - } - export type Filter = { type: "find"; val: ResultSet.Query>; diff --git a/packages/loki/src/loki.ts b/packages/loki/src/loki.ts index 05cb48f0..e09544e8 100644 --- a/packages/loki/src/loki.ts +++ b/packages/loki/src/loki.ts @@ -1,8 +1,10 @@ /* global global */ import { LokiEventEmitter } from "./event_emitter"; import { Collection } from "./collection"; +import { clone } from "./clone"; import { Doc, StorageAdapter } from "../../common/types"; import { PLUGINS } from "../../common/plugin"; +import { Serialization, migrateDatabase } from "./serialization/migration"; function getENV(): Loki.Environment { if (global !== undefined && (global["android"] || global["NSObject"])) { @@ -37,8 +39,8 @@ export class Loki extends LokiEventEmitter { // persist version of code which created the database to the database. // could use for upgrade scenarios - private databaseVersion: number = 1.5; // TODO - private engineVersion: number = 1.5; + private databaseVersion: 2.0 = 2.0; + private engineVersion: 2.0 = 2.0; public _collections: Collection[]; @@ -187,28 +189,16 @@ export class Loki extends LokiEventEmitter { /** * Copies 'this' database into a new Loki instance. Object references are shared to make lightweight. - * @param {object} options - options - * @param {boolean} options.removeNonSerializable - nulls properties not safe for serialization. + * @return {Loki} a shallow copy. */ - public copy(options: Loki.CopyOptions = {}): Loki { - const databaseCopy = new Loki(this.filename, {env: this._env}); + private _copy(): Loki { + const dbCopy = clone(this, "shallow"); + dbCopy._collections = []; - // currently inverting and letting loadJSONObject do most of the work - databaseCopy.loadJSONObject(this, { - retainDirtyFlags: true - }); - - // since our toJSON is not invoked for reference database adapters, this will let us mimic - if (options.removeNonSerializable) { - databaseCopy._persistenceAdapter = null; - - for (let idx = 0; idx < databaseCopy._collections.length; idx++) { - databaseCopy._collections[idx]._constraints = null; - databaseCopy._collections[idx]._ttl = null; - } + for (let i = 0; i < this._collections.length; i++) { + dbCopy._collections[i] = clone(this._collections[i], "shallow"); } - - return databaseCopy; + return dbCopy; } /** @@ -333,19 +323,12 @@ export class Loki extends LokiEventEmitter { } // alias of serialize - public toJSON(): Loki.Serialized { + public toJSON(): Serialization.Loki { return { - _env: this._env, - _serializationMethod: this._serializationMethod, - _autosave: this._autosave, - _autosaveInterval: this._autosaveInterval, - _collections: this._collections, + filename: this.filename, databaseVersion: this.databaseVersion, engineVersion: this.engineVersion, - filename: this.filename, - _persistenceAdapter: this._persistenceAdapter, - _persistenceMethod: this._persistenceMethod, - _throttledSaves: this._throttledSaves + collections: this._collections.map(c => c.toJSON()), }; } @@ -386,8 +369,7 @@ export class Loki extends LokiEventEmitter { } // not just an individual collection, so we will need to serialize db container via shallow copy - let dbcopy = new Loki(this.filename); - dbcopy.loadJSONObject(this); + let dbcopy = this._copy(); for (let idx = 0; idx < dbcopy._collections.length; idx++) { 
dbcopy._collections[idx]._data = []; @@ -523,7 +505,8 @@ export class Loki extends LokiEventEmitter { * * @returns {object|array} An object representation of the deserialized database, not yet applied to 'this' db or document array */ - public deserializeDestructured(destructuredSource: string | string[], options: Loki.SerializeDestructuredOptions = {}) { + public deserializeDestructured(destructuredSource: string | string[], + options: Loki.SerializeDestructuredOptions = {}) { if (options.partitioned === undefined) { options.partitioned = false; } @@ -553,13 +536,12 @@ export class Loki extends LokiEventEmitter { } // Otherwise we are restoring an entire partitioned db - const cdb = JSON.parse(destructuredSource[0]); - const collCount = cdb._collections.length; + const cdb: Serialization.Loki = JSON.parse(destructuredSource[0]); + const collCount = cdb.collections.length; for (let collIndex = 0; collIndex < collCount; collIndex++) { // attach each collection docarray to container collection data, add 1 to collection array index since db is at 0 - cdb._collections[collIndex]._data = this.deserializeCollection(destructuredSource[collIndex + 1], options); + cdb.collections[collIndex].data = this.deserializeCollection(destructuredSource[collIndex + 1], options); } - return cdb; } @@ -583,8 +565,8 @@ export class Loki extends LokiEventEmitter { } // first line is database and collection shells - const cdb = JSON.parse(workarray[0]); - const collCount = cdb._collections.length; + const cdb: Serialization.Loki = JSON.parse(workarray[0]); + const collCount = cdb.collections.length; workarray[0] = null; let collIndex = 0; @@ -598,7 +580,7 @@ export class Loki extends LokiEventEmitter { done = true; } } else { - cdb._collections[collIndex]._data.push(JSON.parse(workarray[lineIndex])); + cdb.collections[collIndex].data.push(JSON.parse(workarray[lineIndex])); } // lower memory pressure and advance iterator @@ -618,7 +600,8 @@ export class Loki extends LokiEventEmitter { * * @returns {Array} an array of documents to attach to collection.data. */ - public deserializeCollection(destructuredSource: string | string[], options: Loki.DeserializeCollectionOptions = {}): Doc[] { + public deserializeCollection(destructuredSource: string | string[], + options: Loki.DeserializeCollectionOptions = {}): Doc[] { if (options.partitioned === undefined) { options.partitioned = false; } @@ -676,21 +659,64 @@ export class Loki extends LokiEventEmitter { /** * Inflates a loki database from a JS object - * - * @param {object} dbObject - a serialized loki database object + * @param {object} obj - a serialized loki database object * @param {object} options - apply or override collection level settings * @param {boolean} options.retainDirtyFlags - whether collection dirty flags will be preserved */ - public loadJSONObject(dbObject: Loki, options?: Collection.DeserializeOptions): void; - public loadJSONObject(dbObject: Loki.Serialized, options?: Collection.DeserializeOptions): void; - public loadJSONObject(dbObject: any, options: Collection.DeserializeOptions = {}): void { - const len = dbObject._collections ? dbObject._collections.length : 0; + public loadJSONObject(obj: Serialization.Serialized, options: Collection.DeserializeOptions = {}): void { + + const databaseVersion = obj.databaseVersion; + const dbObj = migrateDatabase(obj); - this.filename = dbObject.filename; + const len = dbObj.collections ? 
dbObj.collections.length : 0; + this.filename = dbObj.filename; this._collections = []; - for (let i = 0; i < len; ++i) { - this._collections.push(Collection.fromJSONObject(dbObject._collections[i], options)); + for (let i = 0; i < len; i++) { + let coll = null; + const dbColl = dbObj.collections[i]; + + if (options.migrate) { + // Generate options from serialized collection. + const collOptions: Collection.Options = {}; + + collOptions.adaptiveBinaryIndices = dbColl.adaptiveBinaryIndices; + collOptions.nestedProperties = dbColl.nestedProperties; + collOptions.asyncListeners = dbColl.asyncListeners; + collOptions.disableChangesApi = dbColl.disableChangesApi; + collOptions.disableDeltaChangesApi = dbColl.disableDeltaChangesApi; + collOptions.disableMeta = dbColl.disableMeta; + collOptions.indices = Object.keys(dbColl.binaryIndices); + collOptions.unique = dbColl.uniqueNames; + collOptions.clone = dbColl.cloneObjects; + collOptions.cloneMethod = dbColl.cloneMethod; + collOptions.serializableIndices = dbColl.serializableIndices; + collOptions.ttl = dbColl.ttl; + collOptions.ttlInterval = dbColl.ttlInterval; + collOptions.transactional = dbColl.transactional; + + if (collOptions.fullTextSearch) { + collOptions.fullTextSearch = []; + for (let [key, value] of Object.entries(dbColl.fullTextSearch.ii)) { + collOptions.fullTextSearch.push({ + field: key, store: value.store, optimizeChanges: value.optimizeChanges + }); + } + } + + if (options.migrate(databaseVersion, dbColl, collOptions)) { + coll = this.addCollection(dbColl.name, collOptions); + for (let j = 0; j < dbColl.data.length; j++) { + delete dbColl.data[j].$loki; + coll.insert(dbColl.data[j]); + } + } + } + + if (!coll) { + coll = Collection.fromJSONObject(dbObj.collections[i], options); + } + this._collections.push(coll); } } @@ -837,24 +863,16 @@ export class Loki extends LokiEventEmitter { return Promise.reject(new Error("persistenceAdapter not configured")); } - return Promise.resolve(this._persistenceAdapter.loadDatabase(this.filename)) - .then((dbString) => { - if (typeof (dbString) === "string") { - this.loadJSON(dbString, options); - this.emit("load", this); + return this._persistenceAdapter.loadDatabase(this.filename) + .then((obj) => { + if (typeof obj === "string") { + this.loadJSON(obj, options); + } else if (obj instanceof Loki) { + this._collections = obj._collections; } else { - dbString = dbString as object; - // if adapter has returned an js object (other than null or error) attempt to load from JSON object - if (typeof (dbString) === "object" && dbString !== null && !(dbString instanceof Error)) { - this.loadJSONObject(dbString, options); - this.emit("load", this); - } else { - if (dbString instanceof Error) - throw dbString; - - throw new TypeError("The persistence adapter did not load a serialized DB string or object."); - } + this.loadJSONObject(obj, options); } + this.emit("load", this); }); } @@ -901,7 +919,7 @@ export class Loki extends LokiEventEmitter { // check if the adapter is requesting (and supports) a 'reference' mode export if (this._persistenceAdapter.mode === "reference" && typeof this._persistenceAdapter.exportDatabase === "function") { // filename may seem redundant but loadDatabase will need to expect this same filename - return Promise.resolve(this._persistenceAdapter.exportDatabase(this.filename, this.copy({removeNonSerializable: true}))) + return Promise.resolve(this._persistenceAdapter.exportDatabase(this.filename, this._copy())) .then(() => { this._autosaveClearFlags(); this.emit("save"); @@ 
-1068,20 +1086,6 @@ export namespace Loki { started?: Date; } - export interface Serialized { - _env: Environment; - _serializationMethod: SerializationMethod; - _autosave: boolean; - _autosaveInterval: number; - _collections: Collection[]; - databaseVersion: number; - engineVersion: number; - filename: string; - _persistenceAdapter: StorageAdapter; - _persistenceMethod: PersistenceMethod; - _throttledSaves: boolean; - } - export type LoadDatabaseOptions = Collection.DeserializeOptions & ThrottledDrainOptions; export type SerializationMethod = "normal" | "pretty" | "destructured"; diff --git a/packages/loki/src/result_set.ts b/packages/loki/src/result_set.ts index 6f724b4e..2d40eb8f 100644 --- a/packages/loki/src/result_set.ts +++ b/packages/loki/src/result_set.ts @@ -3,7 +3,8 @@ import { clone, CloneMethod } from "./clone"; import { ltHelper, gtHelper, aeqHelper, sortHelper } from "./helper"; import { Doc } from "../../common/types"; import { Scorer } from "../../full-text-search/src/scorer"; -import { Query as FullTextSearchQuery } from "../../full-text-search/src/query_types"; +import {Query as FullTextSearchQuery } from "../../full-text-search/src/query_types"; +import { Serialization } from "./serialization/migration"; // used to recursively scan hierarchical transform step object for param substitution function resolveTransformObject(subObj: Collection.Transform, params: object, depth: number = 0): Collection.Transform { @@ -302,10 +303,20 @@ export class ResultSet { - const copy = this.copy(); - copy._collection = null; - return copy; + public toJSON(): Serialization.ResultSet { + return { + filterInitialized: this._filterInitialized, + filteredRows: this._filteredRows, + scoring: this._scoring + }; + } + + public static fromJSONObject(collection: Collection, obj: Serialization.ResultSet): ResultSet { + let rs = new ResultSet(collection); + rs._filterInitialized = obj.filterInitialized; + rs._filteredRows = obj.filteredRows; + rs._scoring = obj.scoring; + return rs; } /** @@ -483,7 +494,7 @@ export class ResultSet { + this._filteredRows = this._collection._binaryIndices[propname].values.filter((n: number) => { return io[n]; }); @@ -1238,63 +1249,63 @@ export namespace ResultSet { R extends object ? 
keyof R | (keyof R)[] : never; export type LokiOps = { - $eq?: R; + $eq: R; } | { - $aeq?: R; + $aeq: R; } | { - $ne?: R; + $ne: R; } | { - $dteq?: Date; + $dteq: Date; } | { - $gt?: R; + $gt: R; } | { - $gte?: R; + $gte: R; } | { - $lt?: R; + $lt: R; } | { - $lte?: R; + $lte: R; } | { - $between?: [R, R]; + $between: [R, R]; } | { - $in?: R[]; + $in: R[]; } | { - $nin?: R[]; + $nin: R[]; } | { - $keyin?: object; + $keyin: object; } | { - $nkeyin?: object; + $nkeyin: object; } | { - $definedin?: object; + $definedin: object; } | { - $undefinedin?: object; + $undefinedin: object; } | { - $regex?: RegExp | string | [string, string] // string and [string, string] are better for serialization + $regex: RegExp | string | [string, string] // string and [string, string] are better for serialization } | { - $containsNone?: ContainsHelperType; + $containsNone: ContainsHelperType; } | { - $containsAny?: ContainsHelperType; + $containsAny: ContainsHelperType; } | { - $contains?: ContainsHelperType; + $contains: ContainsHelperType; } | { - $type?: string; + $type: string; } | { - $finite?: boolean; + $finite: boolean; } | { - $size?: number; + $size: number; } | { - $len?: number; + $len: number; } | { - $where?: (val?: R) => boolean; + $where: (val: R) => boolean; } | { - $jgt?: R; + $jgt: R; } | { - $jgte?: R; + $jgte: R; } | { - $jlt?: R; + $jlt: R; } | { - $jlte?: R; + $jlte: R; } | { - $jbetween?: [R, R]; + $jbetween: [R, R]; }; export type Query = diff --git a/packages/loki/src/serialization/migration.ts b/packages/loki/src/serialization/migration.ts new file mode 100644 index 00000000..8170f77f --- /dev/null +++ b/packages/loki/src/serialization/migration.ts @@ -0,0 +1,107 @@ +import { V2_0, V2_0 as Serialization } from "./v2_0"; +import { V1_5 } from "./v1_5"; +import { Dict } from "../../../common/types"; + +export {Serialization}; + +export type MergeRightBiased = + TLeft extends any[] ? TRight : + TRight extends any[] ? TRight : + TRight extends Function ? TRight : + TLeft extends object ? + TRight extends object ? { + // All properties of Left and Right, recursive + [P in keyof TLeft & keyof TRight]: MergeRightBiased + } & { + // All properties of Left not in Right + [P in Exclude]: TLeft[P]; + } & { + // All properties of Right not in Left + [P in Exclude]: TRight[P] + } + // Prefer Right + : TRight + : TRight; + +function isObject(t: any): t is object { + return t !== null && typeof t === "object" && !Array.isArray(t); +} + +/** + * Merges two objects to one using a proxy. + * The properties of the right object are preferred. 
+ * @param {TLeft} left - the unfavored left object + * @param {TRight} right - the favoured right object + * @returns {MergeRightBiased} + * @hidden + */ +export function mergeRightBiasedWithProxy(left: TLeft, right: TRight): MergeRightBiased { + return new Proxy({}, + { + get: function (target, prop) { + if (target.hasOwnProperty(prop)) { + return target[prop]; + } + if (isObject(right) && right.hasOwnProperty(prop)) { + if (isObject(right[prop]) && isObject(left) && isObject(left[prop])) { + return mergeRightBiasedWithProxy(left[prop], right[prop]); + } + return right[prop]; + } + if (isObject(left) && left.hasOwnProperty(prop)) { + return left[prop]; + } + return undefined; + } + } + ) as any; +} + +function migrateV1_5toV2_0(obj: V1_5.Loki): V2_0.Loki { + + function migrateCloneMethod(clone: V1_5.CloneMethod): V2_0.CloneMethod { + switch (clone) { + case "jquery-extend-deep": + return "deep"; + case "shallow-assign": + return "shallow"; + case "shallow-recurse-objects": + return "shallow-recurse"; + default: + return clone; + } + } + + return mergeRightBiasedWithProxy(obj, + { + databaseVersion: 2.0 as 2.0, + collections: obj.collections.map(coll => mergeRightBiasedWithProxy(coll, { + dynamicViews: coll.DynamicViews.map(dv => mergeRightBiasedWithProxy(dv, { + persistent: dv.options.persistent, + sortPriority: dv.options.sortPriority, + minRebuildInterval: dv.options.minRebuildInterval, + resultSet: mergeRightBiasedWithProxy(dv.resultset, { + filteredRows: dv.resultset.filteredrows, + scoring: null + }), + sortByScoring: false, + sortCriteriaSimple: { + field: dv.sortCriteriaSimple.propname + }, + })), + cloneMethod: migrateCloneMethod(coll.cloneMethod), + transforms: coll.transforms as any as Dict, // TODO not accurate + nestedProperties: [], + ttl: undefined, + ttlInterval: undefined, + fullTextSearch: null, + })) + }); +} + +export function migrateDatabase(obj: Serialization.Serialized): Serialization.Loki { + if (obj.databaseVersion === 1.5) { + return migrateDatabase(migrateV1_5toV2_0(obj as V1_5.Loki)); + } + return obj as Serialization.Loki; +} diff --git a/packages/loki/src/serialization/v1_5.ts b/packages/loki/src/serialization/v1_5.ts new file mode 100644 index 00000000..228f0be7 --- /dev/null +++ b/packages/loki/src/serialization/v1_5.ts @@ -0,0 +1,157 @@ +import { Dict } from "../../../common/types"; + +export namespace V1_5 { + + export interface Serialized { + databaseVersion: number; + } + + export type Doc = { + $loki: number; + meta?: { + created: number; + revision: number; + version: number, + updated?: number; + }; + [index: string]: any; + [index: number]: any; + }; + + export interface Loki { + filename: string; + collections: Collection[]; + databaseVersion: 1.5; + engineVersion: number; + throttledSaves: boolean; + ENV: "NODEJS" | "NATIVESCRIPT" | "CORDOVA" | "BROWSER"; + } + + export interface BinaryIndex { + name: string; + dirty: boolean; + values: number[]; + } + + export type Transform = { + type: "find"; + value: ResultSet.Query | string; + } | { + type: "where"; + value: ((obj: Doc) => boolean) | string; + } | { + type: "simplesort"; + property: string; + options?: boolean | ResultSet.SimpleSortOptions; + } | { + type: "compoundsort"; + value: (string | [string, boolean])[]; + } | { + type: "sort"; + value: (a: Doc, b: Doc) => number; + } | { + type: "limit"; + value: number; + } | { + type: "offset"; + value: number; + } | { + type: "map"; + value: (obj: Doc, index: number, array: Doc[]) => any; + dataOptions?: ResultSet.DataOptions; + } | { + type: 
"eqJoin"; + joinData: Collection | ResultSet; + leftJoinKey: string | ((obj: any) => string); + rightJoinKey: string | ((obj: any) => string); + mapFun?: (left: any, right: any) => any; + dataOptions?: ResultSet.DataOptions; + } | { + type: "mapReduce"; + mapFunction: 2, //(item: Doc, index: number, array: Doc[]) => any; + reduceFunction: (array: any[]) => any; + } | { + type: "update"; + value: (obj: Doc) => any; + } | { + type: "remove"; + }; + + export interface Collection { + name: string; + data: Doc[]; + idIndex: number[]; + binaryIndices: { + [key: string]: BinaryIndex; + }; + uniqueNames: string[]; + transforms: Dict; + objType: string; // ?? + dirty: boolean; // ?? + asyncListeners: boolean; + adaptiveBinaryIndices: boolean; + transactional: boolean; + cloneObjects: boolean; + cloneMethod: CloneMethod; + disableMeta: boolean; + disableChangesApi: boolean; + disableDeltaChangesApi: boolean; + autoupdate: boolean; + serializableIndices: boolean; + maxId: number; + DynamicViews: DynamicView[]; + events: {}; + changes: any[]; + } + + export interface ResultSet { + filteredrows: number[]; + filterInitialized: boolean; + } + + export namespace ResultSet { + + export interface SimpleSortOptions { + desc?: boolean; + disableIndexIntersect?: boolean; + forceIndexIntersect?: boolean; + useJavascriptSorting?: boolean; + } + + export interface DataOptions { + forceClones: boolean; + forceCloneMethod: CloneMethod; + removeMeta: boolean; + } + + export type Query = any; + } + + export type CloneMethod = "parse-stringify" | "jquery-extend-deep" | "shallow" | "shallow-assign" | "shallow-recurse-objects"; + + export interface DynamicView { + name: string; + rebuildPending: boolean; + options: { + persistent: true; + sortPriority: "passive" | "active"; + minRebuildInterval: number; + }; + resultset: ResultSet; + filterPipeline: ({ + type: "find"; + val: ResultSet.Query; + uid: number + } | { + type: "where"; + val: (doc: Doc) => boolean; + uid: number + })[]; + sortCriteria: (string | [string, boolean])[]; + sortCriteriaSimple: { + propname: string; + options: boolean | ResultSet.SimpleSortOptions; + }; + sortDirty: boolean; + } +} diff --git a/packages/loki/src/serialization/v2_0.ts b/packages/loki/src/serialization/v2_0.ts new file mode 100644 index 00000000..7239a43d --- /dev/null +++ b/packages/loki/src/serialization/v2_0.ts @@ -0,0 +1,357 @@ +import { Dict } from "../../../common/types"; + +export namespace V2_0 { + + export interface Serialized { + databaseVersion: number; + } + + export interface Loki { + collections: Collection[]; + databaseVersion: 2.0; + engineVersion: number; + filename: string; + } + + export type Doc = { + $loki: number; + meta?: { + created: number; + revision: number; + version: number, + updated?: number; + }; + [index: string]: any; + [index: number]: any; + }; + + export interface Collection { + name: string; + dynamicViews: DynamicView[]; + nestedProperties: { name: string, path: string[] }[]; + uniqueNames: string[]; + transforms: Dict; + binaryIndices: Dict; + data: Doc[]; + idIndex: number[]; + maxId: number; + dirty: boolean; + adaptiveBinaryIndices: boolean; + transactional: boolean; + asyncListeners: boolean; + disableMeta: boolean; + disableChangesApi: boolean; + disableDeltaChangesApi: boolean; + cloneObjects: boolean; + cloneMethod: CloneMethod; + serializableIndices: boolean; + ttl: number; + ttlInterval: number; + changes: any; + fullTextSearch: FullTextSearch; + } + + export interface BinaryIndex { + dirty: boolean; + values: number[]; + } + + export 
type CloneMethod = "parse-stringify" | "deep" | "shallow" | "shallow-recurse"; + + export type Transform = { + type: "find"; + value: Query | string; + } | { + type: "where"; + value: ((obj: Doc) => boolean) | string; + } | { + type: "simplesort"; + property: string; + options?: boolean | ResultSet.SimpleSortOptions; + } | { + type: "compoundsort"; + value: (string | [string, boolean])[]; + } | { + type: "sort"; + value: (a: Doc, b: Doc) => number; + } | { + type: "sortByScoring"; + desc?: boolean; + } | { + type: "limit"; + value: number; + } | { + type: "offset"; + value: number; + } | { + type: "map"; + value: (obj: Doc, index: number, array: Doc[]) => any; + dataOptions?: ResultSet.DataOptions; + } | { + type: "eqJoin"; + joinData: any; // Collection | ResultSet; + leftJoinKey: string | ((obj: any) => string); + rightJoinKey: string | ((obj: any) => string); + mapFun?: (left: any, right: any) => any; + dataOptions?: ResultSet.DataOptions; + } | { + type: "mapReduce"; + mapFunction: (item: Doc, index: number, array: Doc[]) => any; + reduceFunction: (array: any[]) => any; + } | { + type: "update"; + value: (obj: Doc) => any; + } | { + type: "remove"; + }; + + + export interface DynamicView { + name: string; + persistent: boolean; + sortPriority: "passive" | "active"; + minRebuildInterval: number; + resultSet: ResultSet; + filterPipeline: Filter[]; + sortCriteria: (string | [string, boolean])[]; + sortCriteriaSimple: { field: string, options: boolean | ResultSet.SimpleSortOptions }; + sortByScoring: boolean; + sortDirty: boolean; + } + + export interface ResultSet { + filterInitialized: boolean; + filteredRows: number[]; + scoring: FullTextSearch.ScoreResults; + } + + export namespace ResultSet { + export interface DataOptions { + forceClones?: boolean; + forceCloneMethod?: CloneMethod; + removeMeta?: boolean; + } + + export interface SimpleSortOptions { + desc?: boolean; + disableIndexIntersect?: boolean; + forceIndexIntersect?: boolean; + useJavascriptSorting?: boolean; + } + } + + export type Filter = { + type: "find"; + val: Query; + uid: number | string; + } | { + type: "where"; + val: (obj: Doc) => boolean; + uid: number | string; + }; + + export type LokiOps = { + $eq: any; + } | { + $aeq: any; + } | { + $ne: any; + } | { + $dteq: Date; + } | { + $gt: any; + } | { + $gte: any; + } | { + $lt: any; + } | { + $lte: any; + } | { + $between: [any, any]; + } | { + $in: any[]; + } | { + $nin: any[]; + } | { + $keyin: object; + } | { + $nkeyin: object; + } | { + $definedin: object; + } | { + $undefinedin: object; + } | { + $regex: RegExp | string | [string, string] // string and [string, string] are better for serialization + } | { + $containsNone: any; + } | { + $containsAny: any; + } | { + $contains: any; + } | { + $type: string; + } | { + $finite: boolean; + } | { + $size: number; + } | { + $len: number; + } | { + $where: (val: any) => boolean; + } | { + $jgt: any; + } | { + $jgte: any; + } | { + $jlt: any; + } | { + $jlte: any; + } | { + $jbetween: [any, any]; + }; + + export type Query = + { [P: string]: LokiOps | any } + | { $and?: Query[] } + | { $or?: Query[] } + | { $fts?: FullTextSearch.Query }; + + export interface FullTextSearch { + id: string; + ii: Dict; + } + + export namespace FullTextSearch { + export type DocumentIndex = number | string; + + export type InvertedIndex = SpareSerialized | FullSerialized; + + export interface Index { + d?: { + df: number; + dc: [DocumentIndex, number][] + }; + k?: number[]; + v?: Index[]; + } + + export type SpareSerialized = { + store: 
false; + optimizeChanges: boolean; + }; + + export type FullSerialized = { + store: true; + optimizeChanges: boolean; + docCount: number; + docStore: [DocumentIndex, DocStore][]; + totalFieldLength: number; + root: Index; + }; + + export interface DocStore { + fieldLength?: number; + } + + export interface BaseQuery { + type: Type; + boost?: number; + } + + export interface TermQuery extends BaseQuery<"term"> { + field: string; + value: string; + } + + export interface TermsQuery extends BaseQuery<"terms"> { + field: string; + value: string[]; + } + + export interface WildcardQuery extends BaseQuery<"wildcard"> { + field: string; + value: string; + enable_scoring?: boolean; + } + + export interface FuzzyQuery extends BaseQuery<"fuzzy"> { + field: string; + value: string; + fuzziness?: 0 | 1 | 2 | "AUTO"; + prefix_length?: number; + extended?: boolean; + } + + export interface PrefixQuery extends BaseQuery<"prefix"> { + field: string; + value: string; + enable_scoring?: boolean; + } + + export interface ExistsQuery extends BaseQuery<"exists"> { + /// The field name. + field: string; + } + + export interface MatchQuery extends BaseQuery<"match"> { + field: string; + value: string; + minimum_should_match?: number; + operator?: "and" | "or"; + fuzziness?: 0 | 1 | 2 | "AUTO"; + prefix_length?: number; + extended?: boolean; + } + + export interface MatchQueryAll extends BaseQuery<"match_all"> { + } + + export interface ConstantScoreQuery extends BaseQuery<"constant_score"> { + filter: QueryTypes[]; + } + + export interface BoolQuery extends BaseQuery<"bool"> { + must?: QueryTypes[]; + filter?: QueryTypes[]; + should?: QueryTypes[]; + not?: QueryTypes[]; + minimum_should_match?: number; + } + + export type QueryTypes = BoolQuery | ConstantScoreQuery | TermQuery | TermsQuery | WildcardQuery | FuzzyQuery + | MatchQuery | MatchQueryAll | PrefixQuery | ExistsQuery; + + export interface Query { + query: QueryTypes; + calculate_scoring?: boolean; + explain?: boolean; + bm25?: { + k1: number; + b: number; + }; + } + + + export interface BM25Explanation { + boost: number; + score: number; + docID: number; + fieldName: string; + index: string; + idf: number; + tfNorm: number; + tf: number; + fieldLength: number; + avgFieldLength: number; + } + + export interface ConstantExplanation { + boost: number; + score: number; + } + + export type ScoreExplanation = BM25Explanation | ConstantExplanation; + export type ScoreResult = { score: number, explanation?: ScoreExplanation[] }; + export type ScoreResults = Dict; + } +} diff --git a/packages/lokijs/loki-fs-structured-adapter.js b/packages/lokijs/loki-fs-structured-adapter.js new file mode 100644 index 00000000..5b2c13e0 --- /dev/null +++ b/packages/lokijs/loki-fs-structured-adapter.js @@ -0,0 +1,263 @@ + +/* + Loki (node) fs structured Adapter (need to require this script to instance and use it). + + This adapter will save database container and each collection to separate files and + save collection only if it is dirty. It is also designed to use a destructured serialization + method intended to lower the memory overhead of json serialization. + + This adapter utilizes ES6 generator/iterator functionality to stream output and + uses node linereader module to stream input. This should lower memory pressure + in addition to individual object serializations rather than loki's default deep object + serialization. 
+*/ + +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD + define([], factory); + } else if (typeof exports === 'object') { + // Node, CommonJS-like + module.exports = factory(); + } else { + // Browser globals (root is window) + root.LokiFsStructuredAdapter = factory(); + } +}(this, function () { + return (function() { + + const fs = require('fs'); + const readline = require('readline'); + const stream = require('stream'); + + /** + * Loki structured (node) filesystem adapter class. + * This class fulfills the loki 'reference' abstract adapter interface which can be applied to other storage methods. + * + * @constructor LokiFsStructuredAdapter + * + */ + function LokiFsStructuredAdapter() + { + this.mode = "reference"; + this.dbref = null; + this.dirtyPartitions = []; + } + + /** + * Generator for constructing lines for file streaming output of db container or collection. + * + * @param {object=} options - output format options for use externally to loki + * @param {int=} options.partition - can be used to only output an individual collection or db (-1) + * + * @returns {string|array} A custom, restructured aggregation of independent serializations. + * @memberof LokiFsStructuredAdapter + */ + LokiFsStructuredAdapter.prototype.generateDestructured = function*(options) { + var idx, sidx; + var dbcopy; + + options = options || {}; + + if (!options.hasOwnProperty("partition")) { + options.partition = -1; + } + + // if partition is -1 we will return database container with no data + if (options.partition === -1) { + // instantiate lightweight clone and remove its collection data + dbcopy = this.dbref.copy(); + + for(idx=0; idx < dbcopy.collections.length; idx++) { + dbcopy.collections[idx].data = []; + } + + yield dbcopy.serialize({ + serializationMethod: "normal" + }); + + return; + } + + // 'partitioned' along with 'partition' of 0 or greater is a request for single collection serialization + if (options.partition >= 0) { + var doccount, + docidx; + + // dbref collections have all data so work against that + doccount = this.dbref.collections[options.partition].data.length; + + for(docidx=0; docidx 0) { + self.loadNextCollection(dbname, 0, function() { + callback(self.dbref); + }); + } + }); + } + else { + // file does not exist, so callback with null + callback(null); + } + }); + }; + + /** + * Recursive function to chain loading of each collection one at a time. + * If at some point i can determine how to make async driven generator, this may be converted to generator. + * + * @param {string} dbname - the name to give the serialized database within the catalog. + * @param {int} collectionIndex - the ordinal position of the collection to load. + * @param {function} callback - callback to pass to next invocation or to call when done + * @memberof LokiFsStructuredAdapter + */ + LokiFsStructuredAdapter.prototype.loadNextCollection = function(dbname, collectionIndex, callback) { + var instream = fs.createReadStream(dbname + "." 
+ collectionIndex); + var outstream = new stream(); + var rl = readline.createInterface(instream, outstream); + var self=this, + obj; + + rl.on('line', function (line) { + if (line !== "") { + obj = JSON.parse(line); + self.dbref.collections[collectionIndex].data.push(obj); + } + }); + + rl.on('close', function (line) { + instream = null; + outstream = null; + rl = null; + obj = null; + + // if there are more collections, load the next one + if (++collectionIndex < self.dbref.collections.length) { + self.loadNextCollection(dbname, collectionIndex, callback); + } + // otherwise we are done, callback to loadDatabase so it can return the new db object representation. + else { + callback(); + } + }); + }; + + /** + * Generator for yielding sequence of dirty partition indices to iterate. + * + * @memberof LokiFsStructuredAdapter + */ + LokiFsStructuredAdapter.prototype.getPartition = function*() { + var idx, + clen = this.dbref.collections.length; + + // since database container (partition -1) doesn't have dirty flag at db level, always save + yield -1; + + // yield list of dirty partitions for iterateration + for(idx=0; idx + * + * A lightweight document oriented javascript database + */ +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD + define([], factory); + } else if (typeof exports === 'object') { + // CommonJS + module.exports = factory(); + } else { + // Browser globals + root.loki = factory(); + } +}(this, function () { + + return (function () { + 'use strict'; + + var hasOwnProperty = Object.prototype.hasOwnProperty; + + var Utils = { + copyProperties: function (src, dest) { + var prop; + for (prop in src) { + dest[prop] = src[prop]; + } + }, + // used to recursively scan hierarchical transform step object for param substitution + resolveTransformObject: function (subObj, params, depth) { + var prop, + pname; + + if (typeof depth !== 'number') { + depth = 0; + } + + if (++depth >= 10) return subObj; + + for (prop in subObj) { + if (typeof subObj[prop] === 'string' && subObj[prop].indexOf("[%lktxp]") === 0) { + pname = subObj[prop].substring(8); + if (params.hasOwnProperty(pname)) { + subObj[prop] = params[pname]; + } + } else if (typeof subObj[prop] === "object") { + subObj[prop] = Utils.resolveTransformObject(subObj[prop], params, depth); + } + } + + return subObj; + }, + // top level utility to resolve an entire (single) transform (array of steps) for parameter substitution + resolveTransformParams: function (transform, params) { + var idx, + clonedStep, + resolvedTransform = []; + + if (typeof params === 'undefined') return transform; + + // iterate all steps in the transform array + for (idx = 0; idx < transform.length; idx++) { + // clone transform so our scan/replace can operate directly on cloned transform + clonedStep = clone(transform[idx], "shallow-recurse-objects"); + resolvedTransform.push(Utils.resolveTransformObject(clonedStep, params)); + } + + return resolvedTransform; + } + }; + + // wrapping in object to expose to default export for potential user override. + // warning: overriding these methods will override behavior for all loki db instances in memory. + // warning: if you use binary indices these comparators should be the same for all inserts/updates/removes. 
+ var Comparators = { + aeq: aeqHelper, + lt: ltHelper, + gt: gtHelper + }; + + /** Helper function for determining 'loki' abstract equality which is a little more abstract than == + * aeqHelper(5, '5') === true + * aeqHelper(5.0, '5') === true + * aeqHelper(new Date("1/1/2011"), new Date("1/1/2011")) === true + * aeqHelper({a:1}, {z:4}) === true (all objects sorted equally) + * aeqHelper([1, 2, 3], [1, 3]) === false + * aeqHelper([1, 2, 3], [1, 2, 3]) === true + * aeqHelper(undefined, null) === true + */ + function aeqHelper(prop1, prop2) { + var cv1, cv2, t1, t2; + + if (prop1 === prop2) return true; + + // 'falsy' and Boolean handling + if (!prop1 || !prop2 || prop1 === true || prop2 === true || prop1 !== prop1 || prop2 !== prop2) { + // dates and NaN conditions (typed dates before serialization) + switch (prop1) { + case undefined: t1 = 1; break; + case null: t1 = 1; break; + case false: t1 = 3; break; + case true: t1 = 4; break; + case "": t1 = 5; break; + default: t1 = (prop1 === prop1)?9:0; break; + } + + switch (prop2) { + case undefined: t2 = 1; break; + case null: t2 = 1; break; + case false: t2 = 3; break; + case true: t2 = 4; break; + case "": t2 = 5; break; + default: t2 = (prop2 === prop2)?9:0; break; + } + + // one or both is edge case + if (t1 !== 9 || t2 !== 9) { + return (t1===t2); + } + } + + // Handle 'Number-like' comparisons + cv1 = Number(prop1); + cv2 = Number(prop2); + + // if one or both are 'number-like'... + if (cv1 === cv1 || cv2 === cv2) { + return (cv1 === cv2); + } + + // not strict equal nor less than nor gt so must be mixed types, convert to string and use that to compare + cv1 = prop1.toString(); + cv2 = prop2.toString(); + + return (cv1 == cv2); + } + + /** Helper function for determining 'less-than' conditions for ops, sorting, and binary indices. + * In the future we might want $lt and $gt ops to use their own functionality/helper. + * Since binary indices on a property might need to index [12, NaN, new Date(), Infinity], we + * need this function (as well as gtHelper) to always ensure one value is LT, GT, or EQ to another. 
+ */ + function ltHelper(prop1, prop2, equal) { + var cv1, cv2, t1, t2; + + // if one of the params is falsy or strictly true or not equal to itself + // 0, 0.0, "", NaN, null, undefined, not defined, false, true + if (!prop1 || !prop2 || prop1 === true || prop2 === true || prop1 !== prop1 || prop2 !== prop2) { + switch (prop1) { + case undefined: t1 = 1; break; + case null: t1 = 1; break; + case false: t1 = 3; break; + case true: t1 = 4; break; + case "": t1 = 5; break; + // if strict equal probably 0 so sort higher, otherwise probably NaN so sort lower than even null + default: t1 = (prop1 === prop1)?9:0; break; + } + + switch (prop2) { + case undefined: t2 = 1; break; + case null: t2 = 1; break; + case false: t2 = 3; break; + case true: t2 = 4; break; + case "": t2 = 5; break; + default: t2 = (prop2 === prop2)?9:0; break; + } + + // one or both is edge case + if (t1 !== 9 || t2 !== 9) { + return (t1===t2)?equal:(t1 cv2) return false; + return equal; + } + + if (cv1 === cv1 && cv2 !== cv2) { + return true; + } + + if (cv2 === cv2 && cv1 !== cv1) { + return false; + } + + if (prop1 < prop2) return true; + if (prop1 > prop2) return false; + if (prop1 == prop2) return equal; + + // not strict equal nor less than nor gt so must be mixed types, convert to string and use that to compare + cv1 = prop1.toString(); + cv2 = prop2.toString(); + + if (cv1 < cv2) { + return true; + } + + if (cv1 == cv2) { + return equal; + } + + return false; + } + + function gtHelper(prop1, prop2, equal) { + var cv1, cv2, t1, t2; + + // 'falsy' and Boolean handling + if (!prop1 || !prop2 || prop1 === true || prop2 === true || prop1 !== prop1 || prop2 !== prop2) { + switch (prop1) { + case undefined: t1 = 1; break; + case null: t1 = 1; break; + case false: t1 = 3; break; + case true: t1 = 4; break; + case "": t1 = 5; break; + // NaN 0 + default: t1 = (prop1 === prop1)?9:0; break; + } + + switch (prop2) { + case undefined: t2 = 1; break; + case null: t2 = 1; break; + case false: t2 = 3; break; + case true: t2 = 4; break; + case "": t2 = 5; break; + default: t2 = (prop2 === prop2)?9:0; break; + } + + // one or both is edge case + if (t1 !== 9 || t2 !== 9) { + return (t1===t2)?equal:(t1>t2); + } + } + + // if both are numbers (string encoded or not), compare as numbers + cv1 = Number(prop1); + cv2 = Number(prop2); + if (cv1 === cv1 && cv2 === cv2) { + if (cv1 > cv2) return true; + if (cv1 < cv2) return false; + return equal; + } + + if (cv1 === cv1 && cv2 !== cv2) { + return false; + } + + if (cv2 === cv2 && cv1 !== cv1) { + return true; + } + + if (prop1 > prop2) return true; + if (prop1 < prop2) return false; + if (prop1 == prop2) return equal; + + // not strict equal nor less than nor gt so must be dates or mixed types + // convert to string and use that to compare + cv1 = prop1.toString(); + cv2 = prop2.toString(); + + if (cv1 > cv2) { + return true; + } + + if (cv1 == cv2) { + return equal; + } + + return false; + } + + function sortHelper(prop1, prop2, desc) { + if (Comparators.aeq(prop1, prop2)) return 0; + + if (Comparators.lt(prop1, prop2, false)) { + return (desc) ? (1) : (-1); + } + + if (Comparators.gt(prop1, prop2, false)) { + return (desc) ? 
(-1) : (1); + } + + // not lt, not gt so implied equality-- date compatible + return 0; + } + + /** + * compoundeval() - helper function for compoundsort(), performing individual object comparisons + * + * @param {array} properties - array of property names, in order, by which to evaluate sort order + * @param {object} obj1 - first object to compare + * @param {object} obj2 - second object to compare + * @returns {integer} 0, -1, or 1 to designate if identical (sortwise) or which should be first + */ + function compoundeval(properties, obj1, obj2) { + var res = 0; + var prop, field, val1, val2, arr; + for (var i = 0, len = properties.length; i < len; i++) { + prop = properties[i]; + field = prop[0]; + if (~field.indexOf('.')) { + arr = field.split('.'); + val1 = arr.reduce(function(obj, i) { return obj && obj[i] || undefined; }, obj1); + val2 = arr.reduce(function(obj, i) { return obj && obj[i] || undefined; }, obj2); + } else { + val1 = obj1[field]; + val2 = obj2[field]; + } + res = sortHelper(val1, val2, prop[1]); + if (res !== 0) { + return res; + } + } + return 0; + } + + /** + * dotSubScan - helper function used for dot notation queries. + * + * @param {object} root - object to traverse + * @param {array} paths - array of properties to drill into + * @param {function} fun - evaluation function to test with + * @param {any} value - comparative value to also pass to (compare) fun + * @param {number} poffset - index of the item in 'paths' to start the sub-scan from + */ + function dotSubScan(root, paths, fun, value, poffset) { + var pathOffset = poffset || 0; + var path = paths[pathOffset]; + if (root === undefined || root === null || !hasOwnProperty.call(root, path)) { + return false; + } + + var valueFound = false; + var element = root[path]; + if (pathOffset + 1 >= paths.length) { + // if we have already expanded out the dot notation, + // then just evaluate the test function and value on the element + valueFound = fun(element, value); + } else if (Array.isArray(element)) { + for (var index = 0, len = element.length; index < len; index += 1) { + valueFound = dotSubScan(element[index], paths, fun, value, pathOffset + 1); + if (valueFound === true) { + break; + } + } + } else { + valueFound = dotSubScan(element, paths, fun, value, pathOffset + 1); + } + + return valueFound; + } + + function containsCheckFn(a) { + if (typeof a === 'string' || Array.isArray(a)) { + return function (b) { + return a.indexOf(b) !== -1; + }; + } else if (typeof a === 'object' && a !== null) { + return function (b) { + return hasOwnProperty.call(a, b); + }; + } + return null; + } + + function doQueryOp(val, op) { + for (var p in op) { + if (hasOwnProperty.call(op, p)) { + return LokiOps[p](val, op[p]); + } + } + return false; + } + + var LokiOps = { + // comparison operators + // a is the value in the collection + // b is the query value + $eq: function (a, b) { + return a === b; + }, + + // abstract/loose equality + $aeq: function (a, b) { + return a == b; + }, + + $ne: function (a, b) { + // ecma 5 safe test for NaN + if (b !== b) { + // ecma 5 test value is not NaN + return (a === a); + } + + return a !== b; + }, + // date equality / loki abstract equality test + $dteq: function (a, b) { + return Comparators.aeq(a, b); + }, + + // loki comparisons: return identical unindexed results as indexed comparisons + $gt: function (a, b) { + return Comparators.gt(a, b, false); + }, + + $gte: function (a, b) { + return Comparators.gt(a, b, true); + }, + + $lt: function (a, b) { + return Comparators.lt(a, b, false); + 
}, + + $lte: function (a, b) { + return Comparators.lt(a, b, true); + }, + + // lightweight javascript comparisons + $jgt: function (a, b) { + return a > b; + }, + + $jgte: function (a, b) { + return a >= b; + }, + + $jlt: function (a, b) { + return a < b; + }, + + $jlte: function (a, b) { + return a <= b; + }, + + // ex : coll.find({'orderCount': {$between: [10, 50]}}); + $between: function (a, vals) { + if (a === undefined || a === null) return false; + return (Comparators.gt(a, vals[0], true) && Comparators.lt(a, vals[1], true)); + }, + + $jbetween: function (a, vals) { + if (a === undefined || a === null) return false; + return (a >= vals[0] && a <= vals[1]); + }, + + $in: function (a, b) { + return b.indexOf(a) !== -1; + }, + + $nin: function (a, b) { + return b.indexOf(a) === -1; + }, + + $keyin: function (a, b) { + return a in b; + }, + + $nkeyin: function (a, b) { + return !(a in b); + }, + + $definedin: function (a, b) { + return b[a] !== undefined; + }, + + $undefinedin: function (a, b) { + return b[a] === undefined; + }, + + $regex: function (a, b) { + return b.test(a); + }, + + $containsString: function (a, b) { + return (typeof a === 'string') && (a.indexOf(b) !== -1); + }, + + $containsNone: function (a, b) { + return !LokiOps.$containsAny(a, b); + }, + + $containsAny: function (a, b) { + var checkFn = containsCheckFn(a); + if (checkFn !== null) { + return (Array.isArray(b)) ? (b.some(checkFn)) : (checkFn(b)); + } + return false; + }, + + $contains: function (a, b) { + var checkFn = containsCheckFn(a); + if (checkFn !== null) { + return (Array.isArray(b)) ? (b.every(checkFn)) : (checkFn(b)); + } + return false; + }, + + $type: function (a, b) { + var type = typeof a; + if (type === 'object') { + if (Array.isArray(a)) { + type = 'array'; + } else if (a instanceof Date) { + type = 'date'; + } + } + return (typeof b !== 'object') ? (type === b) : doQueryOp(type, b); + }, + + $finite: function(a, b) { + return (b === isFinite(a)); + }, + + $size: function (a, b) { + if (Array.isArray(a)) { + return (typeof b !== 'object') ? (a.length === b) : doQueryOp(a.length, b); + } + return false; + }, + + $len: function (a, b) { + if (typeof a === 'string') { + return (typeof b !== 'object') ? (a.length === b) : doQueryOp(a.length, b); + } + return false; + }, + + $where: function (a, b) { + return b(a) === true; + }, + + // field-level logical operators + // a is the value in the collection + // b is the nested query operation (for '$not') + // or an array of nested query operations (for '$and' and '$or') + $not: function (a, b) { + return !doQueryOp(a, b); + }, + + $and: function (a, b) { + for (var idx = 0, len = b.length; idx < len; idx += 1) { + if (!doQueryOp(a, b[idx])) { + return false; + } + } + return true; + }, + + $or: function (a, b) { + for (var idx = 0, len = b.length; idx < len; idx += 1) { + if (doQueryOp(a, b[idx])) { + return true; + } + } + return false; + } + }; + + // if an op is registered in this object, our 'calculateRange' can use it with our binary indices. + // if the op is registered to a function, we will run that function/op as a 2nd pass filter on results. + // those 2nd pass filter functions should be similar to LokiOps functions, accepting 2 vals to compare. 
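+  // Illustrative sketch (editorial note, not part of the upstream source): the ops above fall
+  // into Comparators-based operators ($gt, $lt, $between, ...), which give the same results
+  // with or without a binary index, and the "$j" variants, which use plain JavaScript
+  // comparisons. Assuming the Collection API defined later in this file, with a hypothetical
+  // 'items' collection:
+  //
+  //   var db = new loki('ops-example.db');
+  //   var items = db.addCollection('items', { indices: ['qty'] });
+  //   items.insert([{ qty: 5 }, { qty: '15' }, { qty: 25 }]);
+  //
+  //   items.find({ qty: { $gt: 10 } });              // Comparators.gt: '15' is number-like, so it matches
+  //   items.find({ qty: { $jgt: 10 } });             // plain `a > b`
+  //   items.find({ qty: { $between: [10, 30] } });   // inclusive range via Comparators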
+ var indexedOps = { + $eq: LokiOps.$eq, + $aeq: true, + $dteq: true, + $gt: true, + $gte: true, + $lt: true, + $lte: true, + $in: true, + $between: true + }; + + function clone(data, method) { + if (data === null || data === undefined) { + return null; + } + + var cloneMethod = method || 'parse-stringify', + cloned; + + switch (cloneMethod) { + case "parse-stringify": + cloned = JSON.parse(JSON.stringify(data)); + break; + case "jquery-extend-deep": + cloned = jQuery.extend(true, {}, data); + break; + case "shallow": + // more compatible method for older browsers + cloned = Object.create(data.constructor.prototype); + Object.keys(data).map(function (i) { + cloned[i] = data[i]; + }); + break; + case "shallow-assign": + // should be supported by newer environments/browsers + cloned = Object.create(data.constructor.prototype); + Object.assign(cloned, data); + break; + case "shallow-recurse-objects": + // shallow clone top level properties + cloned = clone(data, "shallow"); + var keys = Object.keys(data); + // for each of the top level properties which are object literals, recursively shallow copy + keys.forEach(function(key) { + if (typeof data[key] === "object" && data[key].constructor.name === "Object") { + cloned[key] = clone(data[key], "shallow-recurse-objects"); + } + }); + break; + default: + break; + } + + return cloned; + } + + function cloneObjectArray(objarray, method) { + var i, + result = []; + + if (method == "parse-stringify") { + return clone(objarray, method); + } + + i = objarray.length - 1; + + for (; i <= 0; i--) { + result.push(clone(objarray[i], method)); + } + + return result; + } + + function localStorageAvailable() { + try { + return (window && window.localStorage !== undefined && window.localStorage !== null); + } catch (e) { + return false; + } + } + + + /** + * LokiEventEmitter is a minimalist version of EventEmitter. 
It enables any + * constructor that inherits EventEmitter to emit events and trigger + * listeners that have been added to the event through the on(event, callback) method + * + * @constructor LokiEventEmitter + */ + function LokiEventEmitter() {} + + /** + * @prop {hashmap} events - a hashmap, with each property being an array of callbacks + * @memberof LokiEventEmitter + */ + LokiEventEmitter.prototype.events = {}; + + /** + * @prop {boolean} asyncListeners - boolean determines whether or not the callbacks associated with each event + * should happen in an async fashion or not + * Default is false, which means events are synchronous + * @memberof LokiEventEmitter + */ + LokiEventEmitter.prototype.asyncListeners = false; + + /** + * on(eventName, listener) - adds a listener to the queue of callbacks associated to an event + * @param {string|string[]} eventName - the name(s) of the event(s) to listen to + * @param {function} listener - callback function of listener to attach + * @returns {int} the index of the callback in the array of listeners for a particular event + * @memberof LokiEventEmitter + */ + LokiEventEmitter.prototype.on = function (eventName, listener) { + var event; + var self = this; + + if (Array.isArray(eventName)) { + eventName.forEach(function(currentEventName) { + self.on(currentEventName, listener); + }); + return listener; + } + + event = this.events[eventName]; + if (!event) { + event = this.events[eventName] = []; + } + event.push(listener); + return listener; + }; + + /** + * emit(eventName, data) - emits a particular event + * with the option of passing optional parameters which are going to be processed by the callback + * provided signatures match (i.e. if passing emit(event, arg0, arg1) the listener should take two parameters) + * @param {string} eventName - the name of the event + * @param {object=} data - optional object passed with the event + * @memberof LokiEventEmitter + */ + LokiEventEmitter.prototype.emit = function (eventName) { + var self = this; + var selfArgs = Array.prototype.slice.call(arguments, 1); + if (eventName && this.events[eventName]) { + this.events[eventName].forEach(function (listener) { + if (self.asyncListeners) { + setTimeout(function () { + listener.apply(self, selfArgs); + }, 1); + } else { + listener.apply(self, selfArgs); + } + + }); + } else { + throw new Error('No event ' + eventName + ' defined'); + } + }; + + /** + * Alias of LokiEventEmitter.prototype.on + * addListener(eventName, listener) - adds a listener to the queue of callbacks associated to an event + * @param {string|string[]} eventName - the name(s) of the event(s) to listen to + * @param {function} listener - callback function of listener to attach + * @returns {int} the index of the callback in the array of listeners for a particular event + * @memberof LokiEventEmitter + */ + LokiEventEmitter.prototype.addListener = LokiEventEmitter.prototype.on; + + /** + * removeListener() - removes the listener at position 'index' from the event 'eventName' + * @param {string|string[]} eventName - the name(s) of the event(s) which the listener is attached to + * @param {function} listener - the listener callback function to remove from emitter + * @memberof LokiEventEmitter + */ + LokiEventEmitter.prototype.removeListener = function (eventName, listener) { + var self = this; + + if (Array.isArray(eventName)) { + eventName.forEach(function(currentEventName) { + self.removeListener(currentEventName, listener); + }); + + return; + } + + if (this.events[eventName]) { + var 
listeners = this.events[eventName]; + listeners.splice(listeners.indexOf(listener), 1); + } + }; + + /** + * Loki: The main database class + * @constructor Loki + * @implements LokiEventEmitter + * @param {string} filename - name of the file to be saved to + * @param {object=} options - (Optional) config options object + * @param {string} options.env - override environment detection as 'NODEJS', 'BROWSER', 'CORDOVA' + * @param {boolean} [options.verbose=false] - enable console output + * @param {boolean} [options.autosave=false] - enables autosave + * @param {int} [options.autosaveInterval=5000] - time interval (in milliseconds) between saves (if dirty) + * @param {boolean} [options.autoload=false] - enables autoload on loki instantiation + * @param {function} options.autoloadCallback - user callback called after database load + * @param {adapter} options.adapter - an instance of a loki persistence adapter + * @param {string} [options.serializationMethod='normal'] - ['normal', 'pretty', 'destructured'] + * @param {string} options.destructureDelimiter - string delimiter used for destructured serialization + * @param {boolean} [options.throttledSaves=true] - debounces multiple calls to to saveDatabase reducing number of disk I/O operations + and guaranteeing proper serialization of the calls. + */ + function Loki(filename, options) { + this.filename = filename || 'loki.db'; + this.collections = []; + + // persist version of code which created the database to the database. + // could use for upgrade scenarios + this.databaseVersion = 1.5; + this.engineVersion = 1.5; + + // autosave support (disabled by default) + // pass autosave: true, autosaveInterval: 6000 in options to set 6 second autosave + this.autosave = false; + this.autosaveInterval = 5000; + this.autosaveHandle = null; + this.throttledSaves = true; + + this.options = {}; + + // currently keeping persistenceMethod and persistenceAdapter as loki level properties that + // will not or cannot be deserialized. You are required to configure persistence every time + // you instantiate a loki object (or use default environment detection) in order to load the database anyways. + + // persistenceMethod could be 'fs', 'localStorage', or 'adapter' + // this is optional option param, otherwise environment detection will be used + // if user passes their own adapter we will force this method to 'adapter' later, so no need to pass method option. + this.persistenceMethod = null; + + // retain reference to optional (non-serializable) persistenceAdapter 'instance' + this.persistenceAdapter = null; + + // flags used to throttle saves + this.throttledSavePending = false; + this.throttledCallbacks = []; + + // enable console output if verbose flag is set (disabled by default) + this.verbose = options && options.hasOwnProperty('verbose') ? 
options.verbose : false; + + this.events = { + 'init': [], + 'loaded': [], + 'flushChanges': [], + 'close': [], + 'changes': [], + 'warning': [] + }; + + var getENV = function () { + if (typeof global !== 'undefined' && (global.android || global.NSObject)) { + // If no adapter assume nativescript which needs adapter to be passed manually + return 'NATIVESCRIPT'; //nativescript + } + + if (typeof window === 'undefined') { + return 'NODEJS'; + } + + if (typeof global !== 'undefined' && global.window && process) { + return 'NODEJS'; //node-webkit + } + + if (typeof document !== 'undefined') { + if (document.URL.indexOf('http://') === -1 && document.URL.indexOf('https://') === -1) { + return 'CORDOVA'; + } + return 'BROWSER'; + } + return 'CORDOVA'; + }; + + // refactored environment detection due to invalid detection for browser environments. + // if they do not specify an options.env we want to detect env rather than default to nodejs. + // currently keeping two properties for similar thing (options.env and options.persistenceMethod) + // might want to review whether we can consolidate. + if (options && options.hasOwnProperty('env')) { + this.ENV = options.env; + } else { + this.ENV = getENV(); + } + + // not sure if this is necessary now that i have refactored the line above + if (this.ENV === 'undefined') { + this.ENV = 'NODEJS'; + } + + this.configureOptions(options, true); + + this.on('init', this.clearChanges); + + } + + // db class is an EventEmitter + Loki.prototype = new LokiEventEmitter(); + Loki.prototype.constructor = Loki; + + // experimental support for browserify's abstract syntax scan to pick up dependency of indexed adapter. + // Hopefully, once this hits npm a browserify require of lokijs should scan the main file and detect this indexed adapter reference. 
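+  // Illustrative sketch (editorial note, not part of the upstream source): typical construction
+  // using the options documented on the Loki constructor above and processed by
+  // configureOptions() below; the filename and callback body are placeholders.
+  //
+  //   var db = new loki('example.db', {
+  //     env: 'NODEJS',               // skip environment detection
+  //     persistenceMethod: 'fs',     // or 'localStorage' / 'memory', or pass an `adapter` instance
+  //     autoload: true,
+  //     autoloadCallback: function (err) { /* database loaded (or err) */ },
+  //     autosave: true,
+  //     autosaveInterval: 10000      // ms between saves while dirty
+  //   });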
+ Loki.prototype.getIndexedAdapter = function () { + var adapter; + + if (typeof require === 'function') { + adapter = require("./loki-indexed-adapter.js"); + } + + return adapter; + }; + + + /** + * Allows reconfiguring database options + * + * @param {object} options - configuration options to apply to loki db object + * @param {string} options.env - override environment detection as 'NODEJS', 'BROWSER', 'CORDOVA' + * @param {boolean} options.verbose - enable console output (default is 'false') + * @param {boolean} options.autosave - enables autosave + * @param {int} options.autosaveInterval - time interval (in milliseconds) between saves (if dirty) + * @param {boolean} options.autoload - enables autoload on loki instantiation + * @param {function} options.autoloadCallback - user callback called after database load + * @param {adapter} options.adapter - an instance of a loki persistence adapter + * @param {string} options.serializationMethod - ['normal', 'pretty', 'destructured'] + * @param {string} options.destructureDelimiter - string delimiter used for destructured serialization + * @param {boolean} initialConfig - (internal) true is passed when loki ctor is invoking + * @memberof Loki + */ + Loki.prototype.configureOptions = function (options, initialConfig) { + var defaultPersistence = { + 'NODEJS': 'fs', + 'BROWSER': 'localStorage', + 'CORDOVA': 'localStorage', + 'MEMORY': 'memory' + }, + persistenceMethods = { + 'fs': LokiFsAdapter, + 'localStorage': LokiLocalStorageAdapter, + 'memory': LokiMemoryAdapter + }; + + this.options = {}; + + this.persistenceMethod = null; + // retain reference to optional persistence adapter 'instance' + // currently keeping outside options because it can't be serialized + this.persistenceAdapter = null; + + // process the options + if (typeof (options) !== 'undefined') { + this.options = options; + + if (this.options.hasOwnProperty('persistenceMethod')) { + // check if the specified persistence method is known + if (typeof (persistenceMethods[options.persistenceMethod]) == 'function') { + this.persistenceMethod = options.persistenceMethod; + this.persistenceAdapter = new persistenceMethods[options.persistenceMethod](); + } + // should be throw an error here, or just fall back to defaults ?? + } + + // if user passes adapter, set persistence mode to adapter and retain persistence adapter instance + if (this.options.hasOwnProperty('adapter')) { + this.persistenceMethod = 'adapter'; + this.persistenceAdapter = options.adapter; + this.options.adapter = null; + } + + + // if they want to load database on loki instantiation, now is a good time to load... 
after adapter set and before possible autosave initiation + if (options.autoload && initialConfig) { + // for autoload, let the constructor complete before firing callback + var self = this; + setTimeout(function () { + self.loadDatabase(options, options.autoloadCallback); + }, 1); + } + + if (this.options.hasOwnProperty('autosaveInterval')) { + this.autosaveDisable(); + this.autosaveInterval = parseInt(this.options.autosaveInterval, 10); + } + + if (this.options.hasOwnProperty('autosave') && this.options.autosave) { + this.autosaveDisable(); + this.autosave = true; + + if (this.options.hasOwnProperty('autosaveCallback')) { + this.autosaveEnable(options, options.autosaveCallback); + } else { + this.autosaveEnable(); + } + } + + if (this.options.hasOwnProperty('throttledSaves')) { + this.throttledSaves = this.options.throttledSaves; + } + } // end of options processing + + // ensure defaults exists for options which were not set + if (!this.options.hasOwnProperty('serializationMethod')) { + this.options.serializationMethod = 'normal'; + } + + // ensure passed or default option exists + if (!this.options.hasOwnProperty('destructureDelimiter')) { + this.options.destructureDelimiter = '$<\n'; + } + + // if by now there is no adapter specified by user nor derived from persistenceMethod: use sensible defaults + if (this.persistenceAdapter === null) { + this.persistenceMethod = defaultPersistence[this.ENV]; + if (this.persistenceMethod) { + this.persistenceAdapter = new persistenceMethods[this.persistenceMethod](); + } + } + + }; + + /** + * Copies 'this' database into a new Loki instance. Object references are shared to make lightweight. + * + * @param {object} options - apply or override collection level settings + * @param {bool} options.removeNonSerializable - nulls properties not safe for serialization. + * @memberof Loki + */ + Loki.prototype.copy = function(options) { + // in case running in an environment without accurate environment detection, pass 'NA' + var databaseCopy = new Loki(this.filename, { env: "NA" }); + var clen, idx; + + options = options || {}; + + // currently inverting and letting loadJSONObject do most of the work + databaseCopy.loadJSONObject(this, { retainDirtyFlags: true }); + + // since our JSON serializeReplacer is not invoked for reference database adapters, this will let us mimic + if(options.hasOwnProperty("removeNonSerializable") && options.removeNonSerializable === true) { + databaseCopy.autosaveHandle = null; + databaseCopy.persistenceAdapter = null; + + clen = databaseCopy.collections.length; + for (idx=0; idx 0) { + throw new Error("disableMeta option cannot be passed as true when ttl is enabled"); + } + } + + for (i = 0; i < len; i += 1) { + if (this.collections[i].name === name) { + return this.collections[i]; + } + } + + var collection = new Collection(name, options); + this.collections.push(collection); + + if (this.verbose) + collection.console = console; + + return collection; + }; + + Loki.prototype.loadCollection = function (collection) { + if (!collection.name) { + throw new Error('Collection must have a name property to be loaded'); + } + this.collections.push(collection); + }; + + /** + * Retrieves reference to a collection by name. 
+ * @param {string} collectionName - name of collection to look up + * @returns {Collection} Reference to collection in database by that name, or null if not found + * @memberof Loki + */ + Loki.prototype.getCollection = function (collectionName) { + var i, + len = this.collections.length; + + for (i = 0; i < len; i += 1) { + if (this.collections[i].name === collectionName) { + return this.collections[i]; + } + } + + // no such collection + this.emit('warning', 'collection ' + collectionName + ' not found'); + return null; + }; + + /** + * Renames an existing loki collection + * @param {string} oldName - name of collection to rename + * @param {string} newName - new name of collection + * @returns {Collection} reference to the newly renamed collection + * @memberof Loki + */ + Loki.prototype.renameCollection = function (oldName, newName) { + var c = this.getCollection(oldName); + + if (c) { + c.name = newName; + } + + return c; + }; + + /** + * Returns a list of collections in the database. + * @returns {object[]} array of objects containing 'name', 'type', and 'count' properties. + * @memberof Loki + */ + Loki.prototype.listCollections = function () { + + var i = this.collections.length, + colls = []; + + while (i--) { + colls.push({ + name: this.collections[i].name, + type: this.collections[i].objType, + count: this.collections[i].data.length + }); + } + return colls; + }; + + /** + * Removes a collection from the database. + * @param {string} collectionName - name of collection to remove + * @memberof Loki + */ + Loki.prototype.removeCollection = function (collectionName) { + var i, + len = this.collections.length; + + for (i = 0; i < len; i += 1) { + if (this.collections[i].name === collectionName) { + var tmpcol = new Collection(collectionName, {}); + var curcol = this.collections[i]; + for (var prop in curcol) { + if (curcol.hasOwnProperty(prop) && tmpcol.hasOwnProperty(prop)) { + curcol[prop] = tmpcol[prop]; + } + } + this.collections.splice(i, 1); + return; + } + } + }; + + Loki.prototype.getName = function () { + return this.name; + }; + + /** + * serializeReplacer - used to prevent certain properties from being serialized + * + */ + Loki.prototype.serializeReplacer = function (key, value) { + switch (key) { + case 'autosaveHandle': + case 'persistenceAdapter': + case 'constraints': + case 'ttl': + return null; + case 'throttledSavePending': + case 'throttledCallbacks': + return undefined; + default: + return value; + } + }; + + /** + * Serialize database to a string which can be loaded via {@link Loki#loadJSON} + * + * @returns {string} Stringified representation of the loki database. + * @memberof Loki + */ + Loki.prototype.serialize = function (options) { + options = options || {}; + + if (!options.hasOwnProperty("serializationMethod")) { + options.serializationMethod = this.options.serializationMethod; + } + + switch(options.serializationMethod) { + case "normal": return JSON.stringify(this, this.serializeReplacer); + case "pretty": return JSON.stringify(this, this.serializeReplacer, 2); + case "destructured": return this.serializeDestructured(); // use default options + default: return JSON.stringify(this, this.serializeReplacer); + } + }; + + // alias of serialize + Loki.prototype.toJson = Loki.prototype.serialize; + + /** + * Database level destructured JSON serialization routine to allow alternate serialization methods. + * Internally, Loki supports destructuring via loki "serializationMethod' option and + * the optional LokiPartitioningAdapter class. 
It is also available if you wish to do + * your own structured persistence or data exchange. + * + * @param {object=} options - output format options for use externally to loki + * @param {bool=} options.partitioned - (default: false) whether db and each collection are separate + * @param {int=} options.partition - can be used to only output an individual collection or db (-1) + * @param {bool=} options.delimited - (default: true) whether subitems are delimited or subarrays + * @param {string=} options.delimiter - override default delimiter + * + * @returns {string|array} A custom, restructured aggregation of independent serializations. + * @memberof Loki + */ + Loki.prototype.serializeDestructured = function(options) { + var idx, sidx, result, resultlen; + var reconstruct = []; + var dbcopy; + + options = options || {}; + + if (!options.hasOwnProperty("partitioned")) { + options.partitioned = false; + } + + if (!options.hasOwnProperty("delimited")) { + options.delimited = true; + } + + if (!options.hasOwnProperty("delimiter")) { + options.delimiter = this.options.destructureDelimiter; + } + + // 'partitioned' along with 'partition' of 0 or greater is a request for single collection serialization + if (options.partitioned === true && options.hasOwnProperty("partition") && options.partition >= 0) { + return this.serializeCollection({ + delimited: options.delimited, + delimiter: options.delimiter, + collectionIndex: options.partition + }); + } + + // not just an individual collection, so we will need to serialize db container via shallow copy + dbcopy = new Loki(this.filename); + dbcopy.loadJSONObject(this); + + for(idx=0; idx < dbcopy.collections.length; idx++) { + dbcopy.collections[idx].data = []; + } + + // if we -only- wanted the db container portion, return it now + if (options.partitioned === true && options.partition === -1) { + // since we are deconstructing, override serializationMethod to normal for here + return dbcopy.serialize({ + serializationMethod: "normal" + }); + } + + // at this point we must be deconstructing the entire database + // start by pushing db serialization into first array element + reconstruct.push(dbcopy.serialize({ + serializationMethod: "normal" + })); + + dbcopy = null; + + // push collection data into subsequent elements + for(idx=0; idx < this.collections.length; idx++) { + result = this.serializeCollection({ + delimited: options.delimited, + delimiter: options.delimiter, + collectionIndex: idx + }); + + // NDA : Non-Delimited Array : one iterable concatenated array with empty string collection partitions + if (options.partitioned === false && options.delimited === false) { + if (!Array.isArray(result)) { + throw new Error("a nondelimited, non partitioned collection serialization did not return an expected array"); + } + + // Array.concat would probably duplicate memory overhead for copying strings. + // Instead copy each individually, and clear old value after each copy. + // Hopefully this will allow g.c. to reduce memory pressure, if needed. 
+ resultlen = result.length; + + for (sidx=0; sidx < resultlen; sidx++) { + reconstruct.push(result[sidx]); + result[sidx] = null; + } + + reconstruct.push(""); + } + else { + reconstruct.push(result); + } + } + + // Reconstruct / present results according to four combinations : D, DA, NDA, NDAA + if (options.partitioned) { + // DA : Delimited Array of strings [0] db [1] collection [n] collection { partitioned: true, delimited: true } + // useful for simple future adaptations of existing persistence adapters to save collections separately + if (options.delimited) { + return reconstruct; + } + // NDAA : Non-Delimited Array with subArrays. db at [0] and collection subarrays at [n] { partitioned: true, delimited : false } + // This format might be the most versatile for 'rolling your own' partitioned sync or save. + // Memory overhead can be reduced by specifying a specific partition, but at this code path they did not, so its all. + else { + return reconstruct; + } + } + else { + // D : one big Delimited string { partitioned: false, delimited : true } + // This is the method Loki will use internally if 'destructured'. + // Little memory overhead improvements but does not require multiple asynchronous adapter call scheduling + if (options.delimited) { + // indicate no more collections + reconstruct.push(""); + + return reconstruct.join(options.delimiter); + } + // NDA : Non-Delimited Array : one iterable array with empty string collection partitions { partitioned: false, delimited: false } + // This format might be best candidate for custom synchronous syncs or saves + else { + // indicate no more collections + reconstruct.push(""); + + return reconstruct; + } + } + + reconstruct.push(""); + + return reconstruct.join(delim); + }; + + /** + * Collection level utility method to serialize a collection in a 'destructured' format + * + * @param {object=} options - used to determine output of method + * @param {int} options.delimited - whether to return single delimited string or an array + * @param {string} options.delimiter - (optional) if delimited, this is delimiter to use + * @param {int} options.collectionIndex - specify which collection to serialize data for + * + * @returns {string|array} A custom, restructured aggregation of independent serializations for a single collection. + * @memberof Loki + */ + Loki.prototype.serializeCollection = function(options) { + var doccount, + docidx, + resultlines = []; + + options = options || {}; + + if (!options.hasOwnProperty("delimited")) { + options.delimited = true; + } + + if (!options.hasOwnProperty("collectionIndex")) { + throw new Error("serializeCollection called without 'collectionIndex' option"); + } + + doccount = this.collections[options.collectionIndex].data.length; + + resultlines = []; + + for(docidx=0; docidx collCount) { + done = true; + } + } + else { + currObject = JSON.parse(workarray[lineIndex]); + cdb.collections[collIndex].data.push(currObject); + } + + // lower memory pressure and advance iterator + workarray[lineIndex++] = null; + } + + return cdb; + }; + + /** + * Collection level utility function to deserializes a destructured collection. 
+ * + * @param {string|array} destructuredSource - destructured representation of collection to inflate + * @param {object=} options - used to describe format of destructuredSource input + * @param {int=} [options.delimited=false] - whether source is delimited string or an array + * @param {string=} options.delimiter - if delimited, this is delimiter to use (if other than default) + * + * @returns {array} an array of documents to attach to collection.data. + * @memberof Loki + */ + Loki.prototype.deserializeCollection = function(destructuredSource, options) { + var workarray=[]; + var idx, len; + + options = options || {}; + + if (!options.hasOwnProperty("partitioned")) { + options.partitioned = false; + } + + if (!options.hasOwnProperty("delimited")) { + options.delimited = true; + } + + if (!options.hasOwnProperty("delimiter")) { + options.delimiter = this.options.destructureDelimiter; + } + + if (options.delimited) { + workarray = destructuredSource.split(options.delimiter); + workarray.pop(); + } + else { + workarray = destructuredSource; + } + + len = workarray.length; + for (idx=0; idx < len; idx++) { + workarray[idx] = JSON.parse(workarray[idx]); + } + + return workarray; + }; + + /** + * Inflates a loki database from a serialized JSON string + * + * @param {string} serializedDb - a serialized loki database string + * @param {object=} options - apply or override collection level settings + * @param {bool} options.retainDirtyFlags - whether collection dirty flags will be preserved + * @memberof Loki + */ + Loki.prototype.loadJSON = function (serializedDb, options) { + var dbObject; + if (serializedDb.length === 0) { + dbObject = {}; + } else { + // using option defined in instantiated db not what was in serialized db + switch (this.options.serializationMethod) { + case "normal": + case "pretty": dbObject = JSON.parse(serializedDb); break; + case "destructured": dbObject = this.deserializeDestructured(serializedDb); break; + default: dbObject = JSON.parse(serializedDb); break; + } + } + + this.loadJSONObject(dbObject, options); + }; + + /** + * Inflates a loki database from a JS object + * + * @param {object} dbObject - a serialized loki database string + * @param {object=} options - apply or override collection level settings + * @param {bool} options.retainDirtyFlags - whether collection dirty flags will be preserved + * @memberof Loki + */ + Loki.prototype.loadJSONObject = function (dbObject, options) { + var i = 0, + len = dbObject.collections ? 
dbObject.collections.length : 0, + coll, + copyColl, + clen, + j, + loader, + collObj; + + this.name = dbObject.name; + + // restore save throttled boolean only if not defined in options + if (dbObject.hasOwnProperty('throttledSaves') && options && !options.hasOwnProperty('throttledSaves')) { + this.throttledSaves = dbObject.throttledSaves; + } + + this.collections = []; + + function makeLoader(coll) { + var collOptions = options[coll.name]; + var inflater; + + if(collOptions.proto) { + inflater = collOptions.inflate || Utils.copyProperties; + + return function(data) { + var collObj = new(collOptions.proto)(); + inflater(data, collObj); + return collObj; + }; + } + + return collOptions.inflate; + } + + for (i; i < len; i += 1) { + coll = dbObject.collections[i]; + + copyColl = this.addCollection(coll.name, { disableChangesApi: coll.disableChangesApi, disableDeltaChangesApi: coll.disableDeltaChangesApi, disableMeta: coll.disableMeta }); + + copyColl.adaptiveBinaryIndices = coll.hasOwnProperty('adaptiveBinaryIndices')?(coll.adaptiveBinaryIndices === true): false; + copyColl.transactional = coll.transactional; + copyColl.asyncListeners = coll.asyncListeners; + copyColl.cloneObjects = coll.cloneObjects; + copyColl.cloneMethod = coll.cloneMethod || "parse-stringify"; + copyColl.autoupdate = coll.autoupdate; + copyColl.changes = coll.changes; + + if (options && options.retainDirtyFlags === true) { + copyColl.dirty = coll.dirty; + } + else { + copyColl.dirty = false; + } + + // load each element individually + clen = coll.data.length; + j = 0; + if (options && options.hasOwnProperty(coll.name)) { + loader = makeLoader(coll); + + for (j; j < clen; j++) { + collObj = loader(coll.data[j]); + copyColl.data[j] = collObj; + copyColl.addAutoUpdateObserver(collObj); + } + } else { + + for (j; j < clen; j++) { + copyColl.data[j] = coll.data[j]; + copyColl.addAutoUpdateObserver(copyColl.data[j]); + } + } + + copyColl.maxId = (typeof coll.maxId === 'undefined') ? 
0 : coll.maxId; + copyColl.idIndex = coll.idIndex; + if (typeof (coll.binaryIndices) !== 'undefined') { + copyColl.binaryIndices = coll.binaryIndices; + } + if (typeof coll.transforms !== 'undefined') { + copyColl.transforms = coll.transforms; + } + + copyColl.ensureId(); + + // regenerate unique indexes + copyColl.uniqueNames = []; + if (coll.hasOwnProperty("uniqueNames")) { + copyColl.uniqueNames = coll.uniqueNames; + for (j = 0; j < copyColl.uniqueNames.length; j++) { + copyColl.ensureUniqueIndex(copyColl.uniqueNames[j]); + } + } + + // in case they are loading a database created before we added dynamic views, handle undefined + if (typeof (coll.DynamicViews) === 'undefined') continue; + + // reinflate DynamicViews and attached Resultsets + for (var idx = 0; idx < coll.DynamicViews.length; idx++) { + var colldv = coll.DynamicViews[idx]; + + var dv = copyColl.addDynamicView(colldv.name, colldv.options); + dv.resultdata = colldv.resultdata; + dv.resultsdirty = colldv.resultsdirty; + dv.filterPipeline = colldv.filterPipeline; + + dv.sortCriteria = colldv.sortCriteria; + dv.sortFunction = null; + + dv.sortDirty = colldv.sortDirty; + dv.resultset.filteredrows = colldv.resultset.filteredrows; + dv.resultset.filterInitialized = colldv.resultset.filterInitialized; + + dv.rematerialize({ + removeWhereFilters: true + }); + } + + // Upgrade Logic for binary index refactoring at version 1.5 + if (dbObject.databaseVersion < 1.5) { + // rebuild all indices + copyColl.ensureAllIndexes(true); + copyColl.dirty = true; + } + } + }; + + /** + * Emits the close event. In autosave scenarios, if the database is dirty, this will save and disable timer. + * Does not actually destroy the db. + * + * @param {function=} callback - (Optional) if supplied will be registered with close event before emitting. + * @memberof Loki + */ + Loki.prototype.close = function (callback) { + // for autosave scenarios, we will let close perform final save (if dirty) + // For web use, you might call from window.onbeforeunload to shutdown database, saving pending changes + if (this.autosave) { + this.autosaveDisable(); + if (this.autosaveDirty()) { + this.saveDatabase(callback); + callback = undefined; + } + } + + if (callback) { + this.on('close', callback); + } + this.emit('close'); + }; + + /**-------------------------+ + | Changes API | + +--------------------------*/ + + /** + * The Changes API enables the tracking the changes occurred in the collections since the beginning of the session, + * so it's possible to create a differential dataset for synchronization purposes (possibly to a remote db) + */ + + /** + * (Changes API) : takes all the changes stored in each + * collection and creates a single array for the entire database. If an array of names + * of collections is passed then only the included collections will be tracked. + * + * @param {array=} optional array of collection names. No arg means all collections are processed. 
+ * @returns {array} array of changes + * @see private method createChange() in Collection + * @memberof Loki + */ + Loki.prototype.generateChangesNotification = function (arrayOfCollectionNames) { + function getCollName(coll) { + return coll.name; + } + var changes = [], + selectedCollections = arrayOfCollectionNames || this.collections.map(getCollName); + + this.collections.forEach(function (coll) { + if (selectedCollections.indexOf(getCollName(coll)) !== -1) { + changes = changes.concat(coll.getChanges()); + } + }); + return changes; + }; + + /** + * (Changes API) - stringify changes for network transmission + * @returns {string} string representation of the changes + * @memberof Loki + */ + Loki.prototype.serializeChanges = function (collectionNamesArray) { + return JSON.stringify(this.generateChangesNotification(collectionNamesArray)); + }; + + /** + * (Changes API) : clears all the changes in all collections. + * @memberof Loki + */ + Loki.prototype.clearChanges = function () { + this.collections.forEach(function (coll) { + if (coll.flushChanges) { + coll.flushChanges(); + } + }); + }; + + /*------------------+ + | PERSISTENCE | + -------------------*/ + + /** there are two build in persistence adapters for internal use + * fs for use in Nodejs type environments + * localStorage for use in browser environment + * defined as helper classes here so its easy and clean to use + */ + + /** + * In in-memory persistence adapter for an in-memory database. + * This simple 'key/value' adapter is intended for unit testing and diagnostics. + * + * @param {object=} options - memory adapter options + * @param {boolean} [options.asyncResponses=false] - whether callbacks are invoked asynchronously + * @param {int} [options.asyncTimeout=50] - timeout in ms to queue callbacks + * @constructor LokiMemoryAdapter + */ + function LokiMemoryAdapter(options) { + this.hashStore = {}; + this.options = options || {}; + + if (!this.options.hasOwnProperty('asyncResponses')) { + this.options.asyncResponses = false; + } + + if (!this.options.hasOwnProperty('asyncTimeout')) { + this.options.asyncTimeout = 50; // 50 ms default + } + } + + /** + * Loads a serialized database from its in-memory store. + * (Loki persistence adapter interface function) + * + * @param {string} dbname - name of the database (filename/keyname) + * @param {function} callback - adapter callback to return load result to caller + * @memberof LokiMemoryAdapter + */ + LokiMemoryAdapter.prototype.loadDatabase = function (dbname, callback) { + var self=this; + + if (this.options.asyncResponses) { + setTimeout(function() { + if (self.hashStore.hasOwnProperty(dbname)) { + callback(self.hashStore[dbname].value); + } + else { + // database doesn't exist, return falsy + callback (null); + } + }, this.options.asyncTimeout); + } + else { + if (this.hashStore.hasOwnProperty(dbname)) { + // database doesn't exist, return falsy + callback(this.hashStore[dbname].value); + } + else { + callback (null); + } + } + }; + + /** + * Saves a serialized database to its in-memory store. 
+ * (Loki persistence adapter interface function) + * + * @param {string} dbname - name of the database (filename/keyname) + * @param {function} callback - adapter callback to return load result to caller + * @memberof LokiMemoryAdapter + */ + LokiMemoryAdapter.prototype.saveDatabase = function (dbname, dbstring, callback) { + var self=this; + var saveCount; + + if (this.options.asyncResponses) { + setTimeout(function() { + saveCount = (self.hashStore.hasOwnProperty(dbname)?self.hashStore[dbname].savecount:0); + + self.hashStore[dbname] = { + savecount: saveCount+1, + lastsave: new Date(), + value: dbstring + }; + + callback(); + }, this.options.asyncTimeout); + } + else { + saveCount = (this.hashStore.hasOwnProperty(dbname)?this.hashStore[dbname].savecount:0); + + this.hashStore[dbname] = { + savecount: saveCount+1, + lastsave: new Date(), + value: dbstring + }; + + callback(); + } + }; + + /** + * Deletes a database from its in-memory store. + * + * @param {string} dbname - name of the database (filename/keyname) + * @param {function} callback - function to call when done + * @memberof LokiMemoryAdapter + */ + LokiMemoryAdapter.prototype.deleteDatabase = function(dbname, callback) { + if (this.hashStore.hasOwnProperty(dbname)) { + delete this.hashStore[dbname]; + } + + if (typeof callback === "function") { + callback(); + } + }; + + /** + * An adapter for adapters. Converts a non reference mode adapter into a reference mode adapter + * which can perform destructuring and partioning. Each collection will be stored in its own key/save and + * only dirty collections will be saved. If you turn on paging with default page size of 25megs and save + * a 75 meg collection it should use up roughly 3 save slots (key/value pairs sent to inner adapter). + * A dirty collection that spans three pages will save all three pages again + * Paging mode was added mainly because Chrome has issues saving 'too large' of a string within a + * single indexeddb row. If a single document update causes the collection to be flagged as dirty, all + * of that collection's pages will be written on next save. + * + * @param {object} adapter - reference to a 'non-reference' mode loki adapter instance. + * @param {object=} options - configuration options for partitioning and paging + * @param {bool} options.paging - (default: false) set to true to enable paging collection data. + * @param {int} options.pageSize - (default : 25MB) you can use this to limit size of strings passed to inner adapter. 
+ * @param {string} options.delimiter - allows you to override the default delimeter + * @constructor LokiPartitioningAdapter + */ + function LokiPartitioningAdapter(adapter, options) { + this.mode = "reference"; + this.adapter = null; + this.options = options || {}; + this.dbref = null; + this.dbname = ""; + this.pageIterator = {}; + + // verify user passed an appropriate adapter + if (adapter) { + if (adapter.mode === "reference") { + throw new Error("LokiPartitioningAdapter cannot be instantiated with a reference mode adapter"); + } + else { + this.adapter = adapter; + } + } + else { + throw new Error("LokiPartitioningAdapter requires a (non-reference mode) adapter on construction"); + } + + // set collection paging defaults + if (!this.options.hasOwnProperty("paging")) { + this.options.paging = false; + } + + // default to page size of 25 megs (can be up to your largest serialized object size larger than this) + if (!this.options.hasOwnProperty("pageSize")) { + this.options.pageSize = 25*1024*1024; + } + + if (!this.options.hasOwnProperty("delimiter")) { + this.options.delimiter = '$<\n'; + } + } + + /** + * Loads a database which was partitioned into several key/value saves. + * (Loki persistence adapter interface function) + * + * @param {string} dbname - name of the database (filename/keyname) + * @param {function} callback - adapter callback to return load result to caller + * @memberof LokiPartitioningAdapter + */ + LokiPartitioningAdapter.prototype.loadDatabase = function (dbname, callback) { + var self=this; + this.dbname = dbname; + this.dbref = new Loki(dbname); + + // load the db container (without data) + this.adapter.loadDatabase(dbname, function(result) { + // empty database condition is for inner adapter return null/undefined/falsy + if (!result) { + // partition 0 not found so new database, no need to try to load other partitions. + // return same falsy result to loadDatabase to signify no database exists (yet) + callback(result); + return; + } + + if (typeof result !== "string") { + callback(new Error("LokiPartitioningAdapter received an unexpected response from inner adapter loadDatabase()")); + } + + // I will want to use loki destructuring helper methods so i will inflate into typed instance + var db = JSON.parse(result); + self.dbref.loadJSONObject(db); + db = null; + + var clen = self.dbref.collections.length; + + if (self.dbref.collections.length === 0) { + callback(self.dbref); + return; + } + + self.pageIterator = { + collection: 0, + pageIndex: 0 + }; + + self.loadNextPartition(0, function() { + callback(self.dbref); + }); + }); + }; + + /** + * Used to sequentially load each collection partition, one at a time. + * + * @param {int} partition - ordinal collection position to load next + * @param {function} callback - adapter callback to return load result to caller + */ + LokiPartitioningAdapter.prototype.loadNextPartition = function(partition, callback) { + var keyname = this.dbname + "." 
+ partition; + var self=this; + + if (this.options.paging === true) { + this.pageIterator.pageIndex = 0; + this.loadNextPage(callback); + return; + } + + this.adapter.loadDatabase(keyname, function(result) { + var data = self.dbref.deserializeCollection(result, { delimited: true, collectionIndex: partition }); + self.dbref.collections[partition].data = data; + + if (++partition < self.dbref.collections.length) { + self.loadNextPartition(partition, callback); + } + else { + callback(); + } + }); + }; + + /** + * Used to sequentially load the next page of collection partition, one at a time. + * + * @param {function} callback - adapter callback to return load result to caller + */ + LokiPartitioningAdapter.prototype.loadNextPage = function(callback) { + // calculate name for next saved page in sequence + var keyname = this.dbname + "." + this.pageIterator.collection + "." + this.pageIterator.pageIndex; + var self=this; + + // load whatever page is next in sequence + this.adapter.loadDatabase(keyname, function(result) { + var data = result.split(self.options.delimiter); + result = ""; // free up memory now that we have split it into array + var dlen = data.length; + var idx; + + // detect if last page by presence of final empty string element and remove it if so + var isLastPage = (data[dlen-1] === ""); + if (isLastPage) { + data.pop(); + dlen = data.length; + // empty collections are just a delimiter meaning two blank items + if (data[dlen-1] === "" && dlen === 1) { + data.pop(); + dlen = data.length; + } + } + + // convert stringified array elements to object instances and push to collection data + for(idx=0; idx < dlen; idx++) { + self.dbref.collections[self.pageIterator.collection].data.push(JSON.parse(data[idx])); + data[idx] = null; + } + data = []; + + // if last page, we are done with this partition + if (isLastPage) { + + // if there are more partitions, kick off next partition load + if (++self.pageIterator.collection < self.dbref.collections.length) { + self.loadNextPartition(self.pageIterator.collection, callback); + } + else { + callback(); + } + } + else { + self.pageIterator.pageIndex++; + self.loadNextPage(callback); + } + }); + }; + + /** + * Saves a database by partioning into separate key/value saves. + * (Loki 'reference mode' persistence adapter interface function) + * + * @param {string} dbname - name of the database (filename/keyname) + * @param {object} dbref - reference to database which we will partition and save. 
+ * @param {function} callback - adapter callback to return load result to caller + * + * @memberof LokiPartitioningAdapter + */ + LokiPartitioningAdapter.prototype.exportDatabase = function(dbname, dbref, callback) { + var self=this; + var idx, clen = dbref.collections.length; + + this.dbref = dbref; + this.dbname = dbname; + + // queue up dirty partitions to be saved + this.dirtyPartitions = [-1]; + for(idx=0; idx= cdlen) doneWithPartition = true; + } + // if our current page is bigger than defined pageSize, we are done with page + if (pageLen >= this.options.pageSize) doneWithPage = true; + + // if not done with current page, need delimiter before next item + // if done with partition we also want a delmiter to indicate 'end of pages' final empty row + if (!doneWithPage || doneWithPartition) { + pageBuilder += this.options.delimiter; + pageLen += delimlen; + } + + // if we are done with page save it and pass off to next recursive call or callback + if (doneWithPartition || doneWithPage) { + this.adapter.saveDatabase(keyname, pageBuilder, pageSaveCallback); + return; + } + } + }; + + /** + * A loki persistence adapter which persists using node fs module + * @constructor LokiFsAdapter + */ + function LokiFsAdapter() { + try { + this.fs = require('fs'); + }catch(e) { + this.fs = null; + } + } + + /** + * loadDatabase() - Load data from file, will throw an error if the file does not exist + * @param {string} dbname - the filename of the database to load + * @param {function} callback - the callback to handle the result + * @memberof LokiFsAdapter + */ + LokiFsAdapter.prototype.loadDatabase = function loadDatabase(dbname, callback) { + var self = this; + + this.fs.stat(dbname, function (err, stats) { + if (!err && stats.isFile()) { + self.fs.readFile(dbname, { + encoding: 'utf8' + }, function readFileCallback(err, data) { + if (err) { + callback(new Error(err)); + } else { + callback(data); + } + }); + } + else { + callback(null); + } + }); + }; + + /** + * saveDatabase() - save data to file, will throw an error if the file can't be saved + * might want to expand this to avoid dataloss on partial save + * @param {string} dbname - the filename of the database to load + * @param {function} callback - the callback to handle the result + * @memberof LokiFsAdapter + */ + LokiFsAdapter.prototype.saveDatabase = function saveDatabase(dbname, dbstring, callback) { + var self = this; + var tmpdbname = dbname + '~'; + this.fs.writeFile(tmpdbname, dbstring, function writeFileCallback(err) { + if (err) { + callback(new Error(err)); + } else { + self.fs.rename(tmpdbname,dbname,callback); + } + }); + }; + + /** + * deleteDatabase() - delete the database file, will throw an error if the + * file can't be deleted + * @param {string} dbname - the filename of the database to delete + * @param {function} callback - the callback to handle the result + * @memberof LokiFsAdapter + */ + LokiFsAdapter.prototype.deleteDatabase = function deleteDatabase(dbname, callback) { + this.fs.unlink(dbname, function deleteDatabaseCallback(err) { + if (err) { + callback(new Error(err)); + } else { + callback(); + } + }); + }; + + + /** + * A loki persistence adapter which persists to web browser's local storage object + * @constructor LokiLocalStorageAdapter + */ + function LokiLocalStorageAdapter() {} + + /** + * loadDatabase() - Load data from localstorage + * @param {string} dbname - the name of the database to load + * @param {function} callback - the callback to handle the result + * @memberof LokiLocalStorageAdapter + */ + 
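+  // Illustrative sketch (editorial note, not part of the upstream source): the built-in
+  // adapters above and below all share the same callback-style 'non-reference' interface,
+  // so a custom key/value adapter (the name here is hypothetical) can be passed via the
+  // `adapter` option, or wrapped in LokiPartitioningAdapter for per-collection saves:
+  //
+  //   function MyKeyValueAdapter(store) { this.store = store; }
+  //   MyKeyValueAdapter.prototype.loadDatabase = function (dbname, callback) {
+  //     callback(this.store.hasOwnProperty(dbname) ? this.store[dbname] : null);
+  //   };
+  //   MyKeyValueAdapter.prototype.saveDatabase = function (dbname, dbstring, callback) {
+  //     this.store[dbname] = dbstring;
+  //     callback(null);
+  //   };
+  //   MyKeyValueAdapter.prototype.deleteDatabase = function (dbname, callback) {
+  //     delete this.store[dbname];
+  //     callback(null);
+  //   };
+  //
+  //   var db = new loki('custom.db', { adapter: new MyKeyValueAdapter({}) });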
LokiLocalStorageAdapter.prototype.loadDatabase = function loadDatabase(dbname, callback) { + if (localStorageAvailable()) { + callback(localStorage.getItem(dbname)); + } else { + callback(new Error('localStorage is not available')); + } + }; + + /** + * saveDatabase() - save data to localstorage, will throw an error if the file can't be saved + * might want to expand this to avoid dataloss on partial save + * @param {string} dbname - the filename of the database to load + * @param {function} callback - the callback to handle the result + * @memberof LokiLocalStorageAdapter + */ + LokiLocalStorageAdapter.prototype.saveDatabase = function saveDatabase(dbname, dbstring, callback) { + if (localStorageAvailable()) { + localStorage.setItem(dbname, dbstring); + callback(null); + } else { + callback(new Error('localStorage is not available')); + } + }; + + /** + * deleteDatabase() - delete the database from localstorage, will throw an error if it + * can't be deleted + * @param {string} dbname - the filename of the database to delete + * @param {function} callback - the callback to handle the result + * @memberof LokiLocalStorageAdapter + */ + LokiLocalStorageAdapter.prototype.deleteDatabase = function deleteDatabase(dbname, callback) { + if (localStorageAvailable()) { + localStorage.removeItem(dbname); + callback(null); + } else { + callback(new Error('localStorage is not available')); + } + }; + + /** + * Wait for throttledSaves to complete and invoke your callback when drained or duration is met. + * + * @param {function} callback - callback to fire when save queue is drained, it is passed a sucess parameter value + * @param {object=} options - configuration options + * @param {boolean} options.recursiveWait - (default: true) if after queue is drained, another save was kicked off, wait for it + * @param {bool} options.recursiveWaitLimit - (default: false) limit our recursive waiting to a duration + * @param {int} options.recursiveWaitLimitDelay - (default: 2000) cutoff in ms to stop recursively re-draining + * @memberof Loki + */ + Loki.prototype.throttledSaveDrain = function(callback, options) { + var self = this; + var now = (new Date()).getTime(); + + if (!this.throttledSaves) { + callback(true); + } + + options = options || {}; + if (!options.hasOwnProperty('recursiveWait')) { + options.recursiveWait = true; + } + if (!options.hasOwnProperty('recursiveWaitLimit')) { + options.recursiveWaitLimit = false; + } + if (!options.hasOwnProperty('recursiveWaitLimitDuration')) { + options.recursiveWaitLimitDuration = 2000; + } + if (!options.hasOwnProperty('started')) { + options.started = (new Date()).getTime(); + } + + // if save is pending + if (this.throttledSaves && this.throttledSavePending) { + // if we want to wait until we are in a state where there are no pending saves at all + if (options.recursiveWait) { + // queue the following meta callback for when it completes + this.throttledCallbacks.push(function() { + // if there is now another save pending... 
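+ // this re-check either gives up (success=false) once the recursive wait limit duration is
+ // exceeded, queues another recursive drain pass, or reports success=true when nothing is pending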
+ if (self.throttledSavePending) { + // if we wish to wait only so long and we have exceeded limit of our waiting, callback with false success value + if (options.recursiveWaitLimit && (now - options.started > options.recursiveWaitLimitDuration)) { + callback(false); + return; + } + // it must be ok to wait on next queue drain + self.throttledSaveDrain(callback, options); + return; + } + // no pending saves so callback with true success + else { + callback(true); + return; + } + }); + } + // just notify when current queue is depleted + else { + this.throttledCallbacks.push(callback); + return; + } + } + // no save pending, just callback + else { + callback(true); + } + }; + + /** + * Internal load logic, decoupled from throttling/contention logic + * + * @param {object} options - not currently used (remove or allow overrides?) + * @param {function=} callback - (Optional) user supplied async callback / error handler + */ + Loki.prototype.loadDatabaseInternal = function (options, callback) { + var cFun = callback || function (err, data) { + if (err) { + throw err; + } + }, + self = this; + + // the persistenceAdapter should be present if all is ok, but check to be sure. + if (this.persistenceAdapter !== null) { + + this.persistenceAdapter.loadDatabase(this.filename, function loadDatabaseCallback(dbString) { + if (typeof (dbString) === 'string') { + var parseSuccess = false; + try { + self.loadJSON(dbString, options || {}); + parseSuccess = true; + } catch (err) { + cFun(err); + } + if (parseSuccess) { + cFun(null); + self.emit('loaded', 'database ' + self.filename + ' loaded'); + } + } else { + // falsy result means new database + if (!dbString) { + cFun(null); + self.emit('loaded', 'empty database ' + self.filename + ' loaded'); + return; + } + + // instanceof error means load faulted + if (dbString instanceof Error) { + cFun(dbString); + return; + } + + // if adapter has returned an js object (other than null or error) attempt to load from JSON object + if (typeof (dbString) === "object") { + self.loadJSONObject(dbString, options || {}); + cFun(null); // return null on success + self.emit('loaded', 'database ' + self.filename + ' loaded'); + return; + } + + cFun("unexpected adapter response : " + dbString); + } + }); + + } else { + cFun(new Error('persistenceAdapter not configured')); + } + }; + + /** + * Handles manually loading from file system, local storage, or adapter (such as indexeddb) + * This method utilizes loki configuration options (if provided) to determine which + * persistence method to use, or environment detection (if configuration was not provided). + * To avoid contention with any throttledSaves, we will drain the save queue first. + * + * If you are configured with autosave, you do not need to call this method yourself. 
+ * + * @param {object} options - if throttling saves and loads, this controls how we drain save queue before loading + * @param {boolean} options.recursiveWait - (default: true) wait recursively until no saves are queued + * @param {bool} options.recursiveWaitLimit - (default: false) limit our recursive waiting to a duration + * @param {int} options.recursiveWaitLimitDelay - (default: 2000) cutoff in ms to stop recursively re-draining + * @param {function=} callback - (Optional) user supplied async callback / error handler + * @memberof Loki + * @example + * db.loadDatabase({}, function(err) { + * if (err) { + * console.log("error : " + err); + * } + * else { + * console.log("database loaded."); + * } + * }); + */ + Loki.prototype.loadDatabase = function (options, callback) { + var self=this; + + // if throttling disabled, just call internal + if (!this.throttledSaves) { + this.loadDatabaseInternal(options, callback); + return; + } + + // try to drain any pending saves in the queue to lock it for loading + this.throttledSaveDrain(function(success) { + if (success) { + // pause/throttle saving until loading is done + self.throttledSavePending = true; + + self.loadDatabaseInternal(options, function(err) { + // now that we are finished loading, if no saves were throttled, disable flag + if (self.throttledCallbacks.length === 0) { + self.throttledSavePending = false; + } + // if saves requests came in while loading, kick off new save to kick off resume saves + else { + self.saveDatabase(); + } + + if (typeof callback === 'function') { + callback(err); + } + }); + return; + } + else { + if (typeof callback === 'function') { + callback(new Error("Unable to pause save throttling long enough to read database")); + } + } + }, options); + }; + + /** + * Internal save logic, decoupled from save throttling logic + */ + Loki.prototype.saveDatabaseInternal = function (callback) { + var cFun = callback || function (err) { + if (err) { + throw err; + } + return; + }, + self = this; + + // the persistenceAdapter should be present if all is ok, but check to be sure. + if (this.persistenceAdapter !== null) { + // check if the adapter is requesting (and supports) a 'reference' mode export + if (this.persistenceAdapter.mode === "reference" && typeof this.persistenceAdapter.exportDatabase === "function") { + // filename may seem redundant but loadDatabase will need to expect this same filename + this.persistenceAdapter.exportDatabase(this.filename, this.copy({removeNonSerializable:true}), function exportDatabaseCallback(err) { + self.autosaveClearFlags(); + cFun(err); + }); + } + // otherwise just pass the serialized database to adapter + else { + // persistenceAdapter might be asynchronous, so we must clear `dirty` immediately + // or autosave won't work if an update occurs between here and the callback + self.autosaveClearFlags(); + this.persistenceAdapter.saveDatabase(this.filename, self.serialize(), function saveDatabasecallback(err) { + cFun(err); + }); + } + } else { + cFun(new Error('persistenceAdapter not configured')); + } + }; + + /** + * Handles manually saving to file system, local storage, or adapter (such as indexeddb) + * This method utilizes loki configuration options (if provided) to determine which + * persistence method to use, or environment detection (if configuration was not provided). + * + * If you are configured with autosave, you do not need to call this method yourself. 
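+ * When throttled saves are enabled, calls made while a save is already in flight are
+ * queued; their callbacks run once that save completes and a follow-up save is kicked
+ * off automatically for the queued requests.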
+ * + * @param {function=} callback - (Optional) user supplied async callback / error handler + * @memberof Loki + * @example + * db.saveDatabase(function(err) { + * if (err) { + * console.log("error : " + err); + * } + * else { + * console.log("database saved."); + * } + * }); + */ + Loki.prototype.saveDatabase = function (callback) { + if (!this.throttledSaves) { + this.saveDatabaseInternal(callback); + return; + } + + if (this.throttledSavePending) { + this.throttledCallbacks.push(callback); + return; + } + + var localCallbacks = this.throttledCallbacks; + this.throttledCallbacks = []; + localCallbacks.unshift(callback); + this.throttledSavePending = true; + + var self = this; + this.saveDatabaseInternal(function(err) { + self.throttledSavePending = false; + localCallbacks.forEach(function(pcb) { + if (typeof pcb === 'function') { + // Queue the callbacks so we first finish this method execution + setTimeout(function() { + pcb(err); + }, 1); + } + }); + + // since this is called async, future requests may have come in, if so.. kick off next save + if (self.throttledCallbacks.length > 0) { + self.saveDatabase(); + } + }); + }; + + // alias + Loki.prototype.save = Loki.prototype.saveDatabase; + + /** + * Handles deleting a database from file system, local + * storage, or adapter (indexeddb) + * This method utilizes loki configuration options (if provided) to determine which + * persistence method to use, or environment detection (if configuration was not provided). + * + * @param {function=} callback - (Optional) user supplied async callback / error handler + * @memberof Loki + */ + Loki.prototype.deleteDatabase = function (options, callback) { + var cFun = callback || function (err, data) { + if (err) { + throw err; + } + }; + + // we aren't even using options, so we will support syntax where + // callback is passed as first and only argument + if (typeof options === 'function' && !callback) { + cFun = options; + } + + // the persistenceAdapter should be present if all is ok, but check to be sure. + if (this.persistenceAdapter !== null) { + this.persistenceAdapter.deleteDatabase(this.filename, function deleteDatabaseCallback(err) { + cFun(err); + }); + } else { + cFun(new Error('persistenceAdapter not configured')); + } + }; + + /** + * autosaveDirty - check whether any collections are 'dirty' meaning we need to save (entire) database + * + * @returns {boolean} - true if database has changed since last autosave, false if not. + */ + Loki.prototype.autosaveDirty = function () { + for (var idx = 0; idx < this.collections.length; idx++) { + if (this.collections[idx].dirty) { + return true; + } + } + + return false; + }; + + /** + * autosaveClearFlags - resets dirty flags on all collections. + * Called from saveDatabase() after db is saved. + * + */ + Loki.prototype.autosaveClearFlags = function () { + for (var idx = 0; idx < this.collections.length; idx++) { + this.collections[idx].dirty = false; + } + }; + + /** + * autosaveEnable - begin a javascript interval to periodically save the database. + * + * @param {object} options - not currently used (remove or allow overrides?) 
+ * @param {function=} callback - (Optional) user supplied async callback + */ + Loki.prototype.autosaveEnable = function (options, callback) { + this.autosave = true; + + var delay = 5000, + self = this; + + if (typeof (this.autosaveInterval) !== 'undefined' && this.autosaveInterval !== null) { + delay = this.autosaveInterval; + } + + this.autosaveHandle = setInterval(function autosaveHandleInterval() { + // use of dirty flag will need to be hierarchical since mods are done at collection level with no visibility of 'db' + // so next step will be to implement collection level dirty flags set on insert/update/remove + // along with loki level isdirty() function which iterates all collections to see if any are dirty + + if (self.autosaveDirty()) { + self.saveDatabase(callback); + } + }, delay); + }; + + /** + * autosaveDisable - stop the autosave interval timer. + * + */ + Loki.prototype.autosaveDisable = function () { + if (typeof (this.autosaveHandle) !== 'undefined' && this.autosaveHandle !== null) { + clearInterval(this.autosaveHandle); + this.autosaveHandle = null; + } + }; + + + /** + * Resultset class allowing chainable queries. Intended to be instanced internally. + * Collection.find(), Collection.where(), and Collection.chain() instantiate this. + * + * @example + * mycollection.chain() + * .find({ 'doors' : 4 }) + * .where(function(obj) { return obj.name === 'Toyota' }) + * .data(); + * + * @constructor Resultset + * @param {Collection} collection - The collection which this Resultset will query against. + */ + function Resultset(collection, options) { + options = options || {}; + + // retain reference to collection we are querying against + this.collection = collection; + this.filteredrows = []; + this.filterInitialized = false; + + return this; + } + + /** + * reset() - Reset the resultset to its initial state. + * + * @returns {Resultset} Reference to this resultset, for future chain operations. + */ + Resultset.prototype.reset = function () { + if (this.filteredrows.length > 0) { + this.filteredrows = []; + } + this.filterInitialized = false; + return this; + }; + + /** + * toJSON() - Override of toJSON to avoid circular references + * + */ + Resultset.prototype.toJSON = function () { + var copy = this.copy(); + copy.collection = null; + return copy; + }; + + /** + * Allows you to limit the number of documents passed to next chain operation. + * A resultset copy() is made to avoid altering original resultset. + * + * @param {int} qty - The number of documents to return. + * @returns {Resultset} Returns a copy of the resultset, limited by qty, for subsequent chain ops. + * @memberof Resultset + * // find the two oldest users + * var result = users.chain().simplesort("age", true).limit(2).data(); + */ + Resultset.prototype.limit = function (qty) { + // if this has no filters applied, we need to populate filteredrows first + if (!this.filterInitialized && this.filteredrows.length === 0) { + this.filteredrows = this.collection.prepareFullDocIndex(); + } + + var rscopy = new Resultset(this.collection); + rscopy.filteredrows = this.filteredrows.slice(0, qty); + rscopy.filterInitialized = true; + return rscopy; + }; + + /** + * Used for skipping 'pos' number of documents in the resultset. + * + * @param {int} pos - Number of documents to skip; all preceding documents are filtered out. + * @returns {Resultset} Returns a copy of the resultset, containing docs starting at 'pos' for subsequent chain ops. 
+ * @memberof Resultset + * // find everyone but the two oldest users + * var result = users.chain().simplesort("age", true).offset(2).data(); + */ + Resultset.prototype.offset = function (pos) { + // if this has no filters applied, we need to populate filteredrows first + if (!this.filterInitialized && this.filteredrows.length === 0) { + this.filteredrows = this.collection.prepareFullDocIndex(); + } + + var rscopy = new Resultset(this.collection); + rscopy.filteredrows = this.filteredrows.slice(pos); + rscopy.filterInitialized = true; + return rscopy; + }; + + /** + * copy() - To support reuse of resultset in branched query situations. + * + * @returns {Resultset} Returns a copy of the resultset (set) but the underlying document references will be the same. + * @memberof Resultset + */ + Resultset.prototype.copy = function () { + var result = new Resultset(this.collection); + + if (this.filteredrows.length > 0) { + result.filteredrows = this.filteredrows.slice(); + } + result.filterInitialized = this.filterInitialized; + + return result; + }; + + /** + * Alias of copy() + * @memberof Resultset + */ + Resultset.prototype.branch = Resultset.prototype.copy; + + /** + * transform() - executes a named collection transform or raw array of transform steps against the resultset. + * + * @param transform {(string|array)} - name of collection transform or raw transform array + * @param parameters {object=} - (Optional) object property hash of parameters, if the transform requires them. + * @returns {Resultset} either (this) resultset or a clone of of this resultset (depending on steps) + * @memberof Resultset + * @example + * users.addTransform('CountryFilter', [ + * { + * type: 'find', + * value: { + * 'country': { $eq: '[%lktxp]Country' } + * } + * }, + * { + * type: 'simplesort', + * property: 'age', + * options: { desc: false} + * } + * ]); + * var results = users.chain().transform("CountryFilter", { Country: 'fr' }).data(); + */ + Resultset.prototype.transform = function (transform, parameters) { + var idx, + step, + rs = this; + + // if transform is name, then do lookup first + if (typeof transform === 'string') { + if (this.collection.transforms.hasOwnProperty(transform)) { + transform = this.collection.transforms[transform]; + } + } + + // either they passed in raw transform array or we looked it up, so process + if (typeof transform !== 'object' || !Array.isArray(transform)) { + throw new Error("Invalid transform"); + } + + if (typeof parameters !== 'undefined') { + transform = Utils.resolveTransformParams(transform, parameters); + } + + for (idx = 0; idx < transform.length; idx++) { + step = transform[idx]; + + switch (step.type) { + case "find": + rs.find(step.value); + break; + case "where": + rs.where(step.value); + break; + case "simplesort": + rs.simplesort(step.property, step.desc || step.options); + break; + case "compoundsort": + rs.compoundsort(step.value); + break; + case "sort": + rs.sort(step.value); + break; + case "limit": + rs = rs.limit(step.value); + break; // limit makes copy so update reference + case "offset": + rs = rs.offset(step.value); + break; // offset makes copy so update reference + case "map": + rs = rs.map(step.value, step.dataOptions); + break; + case "eqJoin": + rs = rs.eqJoin(step.joinData, step.leftJoinKey, step.rightJoinKey, step.mapFun, step.dataOptions); + break; + // following cases break chain by returning array data so make any of these last in transform steps + case "mapReduce": + rs = rs.mapReduce(step.mapFunction, step.reduceFunction); + break; + 
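+ // note: mapReduce returns the reduced value itself rather than a Resultset, so it must be
+ // the final step of a transform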
// following cases update documents in current filtered resultset (use carefully) + case "update": + rs.update(step.value); + break; + case "remove": + rs.remove(); + break; + default: + break; + } + } + + return rs; + }; + + /** + * User supplied compare function is provided two documents to compare. (chainable) + * @example + * rslt.sort(function(obj1, obj2) { + * if (obj1.name === obj2.name) return 0; + * if (obj1.name > obj2.name) return 1; + * if (obj1.name < obj2.name) return -1; + * }); + * + * @param {function} comparefun - A javascript compare function used for sorting. + * @returns {Resultset} Reference to this resultset, sorted, for future chain operations. + * @memberof Resultset + */ + Resultset.prototype.sort = function (comparefun) { + // if this has no filters applied, just we need to populate filteredrows first + if (!this.filterInitialized && this.filteredrows.length === 0) { + this.filteredrows = this.collection.prepareFullDocIndex(); + } + + var wrappedComparer = + (function (userComparer, data) { + return function (a, b) { + return userComparer(data[a], data[b]); + }; + })(comparefun, this.collection.data); + + this.filteredrows.sort(wrappedComparer); + + return this; + }; + + /** + * Simpler, loose evaluation for user to sort based on a property name. (chainable). + * Sorting based on the same lt/gt helper functions used for binary indices. + * + * @param {string} propname - name of property to sort by. + * @param {object|bool=} options - boolean to specify if isdescending, or options object + * @param {boolean} [options.desc=false] - whether to sort descending + * @param {boolean} [options.disableIndexIntersect=false] - whether we should explicity not use array intersection. + * @param {boolean} [options.forceIndexIntersect=false] - force array intersection (if binary index exists). + * @param {boolean} [options.useJavascriptSorting=false] - whether results are sorted via basic javascript sort. + * @returns {Resultset} Reference to this resultset, sorted, for future chain operations. + * @memberof Resultset + * @example + * var results = users.chain().simplesort('age').data(); + */ + Resultset.prototype.simplesort = function (propname, options) { + var eff, + targetEff = 10, + dc = this.collection.data.length, + frl = this.filteredrows.length, + hasBinaryIndex = this.collection.binaryIndices.hasOwnProperty(propname); + + if (typeof (options) === 'undefined' || options === false) { + options = { desc: false }; + } + if (options === true) { + options = { desc: true }; + } + + // if nothing in filtered rows array... 
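+ // three paths below: an initialized empty resultset needs no sorting; an uninitialized one
+ // can seed filteredrows from a binary index (or a full doc index) before sorting; an
+ // already-filtered one may instead qualify for the index intersect optimization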
+ if (frl === 0) { + // if the filter is initialized to be empty resultset, do nothing + if (this.filterInitialized) { + return this; + } + + // otherwise no filters applied implies all documents, so we need to populate filteredrows first + + // if we have a binary index, we can just use that instead of sorting (again) + if (this.collection.binaryIndices.hasOwnProperty(propname)) { + // make sure index is up-to-date + this.collection.ensureIndex(propname); + // copy index values into filteredrows + this.filteredrows = this.collection.binaryIndices[propname].values.slice(0); + + if (options.desc) { + this.filteredrows.reverse(); + } + + // we are done, return this (resultset) for further chain ops + return this; + } + // otherwise initialize array for sort below + else { + // build full document index (to be sorted subsequently) + this.filteredrows = this.collection.prepareFullDocIndex(); + } + } + // otherwise we had results to begin with, see if we qualify for index intercept optimization + else { + + // If already filtered, but we want to leverage binary index on sort. + // This will use custom array intection algorithm. + if (!options.disableIndexIntersect && hasBinaryIndex) { + + // calculate filter efficiency + eff = dc/frl; + + // when javascript sort fallback is enabled, you generally need more than ~17% of total docs in resultset + // before array intersect is determined to be the faster algorithm, otherwise leave at 10% for loki sort. + if (options.useJavascriptSorting) { + targetEff = 6; + } + + // anything more than ratio of 10:1 (total documents/current results) should use old sort code path + // So we will only use array intersection if you have more than 10% of total docs in your current resultset. + if (eff <= targetEff || options.forceIndexIntersect) { + var idx, fr=this.filteredrows; + var io = {}; + // set up hashobject for simple 'inclusion test' with existing (filtered) results + for(idx=0; idx obj2[propname]) return 1; + if (obj1[propname] < obj2[propname]) return -1; + }); + } + + // otherwise use loki sort which will return same results if column is indexed or not + var wrappedComparer = + (function (prop, desc, data) { + var val1, val2, arr; + return function (a, b) { + if (~prop.indexOf('.')) { + arr = prop.split('.'); + val1 = arr.reduce(function(obj, i) { return obj && obj[i] || undefined; }, data[a]); + val2 = arr.reduce(function(obj, i) { return obj && obj[i] || undefined; }, data[b]); + } else { + val1 = data[a][prop]; + val2 = data[b][prop]; + } + return sortHelper(val1, val2, desc); + }; + })(propname, options.desc, this.collection.data); + + this.filteredrows.sort(wrappedComparer); + + return this; + }; + + /** + * Allows sorting a resultset based on multiple columns. + * @example + * // to sort by age and then name (both ascending) + * rs.compoundsort(['age', 'name']); + * // to sort by age (ascending) and then by name (descending) + * rs.compoundsort(['age', ['name', true]); + * + * @param {array} properties - array of property names or subarray of [propertyname, isdesc] used evaluate sort order + * @returns {Resultset} Reference to this resultset, sorted, for future chain operations. 
+ * @memberof Resultset + */ + Resultset.prototype.compoundsort = function (properties) { + if (properties.length === 0) { + throw new Error("Invalid call to compoundsort, need at least one property"); + } + + var prop; + if (properties.length === 1) { + prop = properties[0]; + if (Array.isArray(prop)) { + return this.simplesort(prop[0], prop[1]); + } + return this.simplesort(prop, false); + } + + // unify the structure of 'properties' to avoid checking it repeatedly while sorting + for (var i = 0, len = properties.length; i < len; i += 1) { + prop = properties[i]; + if (!Array.isArray(prop)) { + properties[i] = [prop, false]; + } + } + + // if this has no filters applied, just we need to populate filteredrows first + if (!this.filterInitialized && this.filteredrows.length === 0) { + this.filteredrows = this.collection.prepareFullDocIndex(); + } + + var wrappedComparer = + (function (props, data) { + return function (a, b) { + return compoundeval(props, data[a], data[b]); + }; + })(properties, this.collection.data); + + this.filteredrows.sort(wrappedComparer); + + return this; + }; + + /** + * findOr() - oversee the operation of OR'ed query expressions. + * OR'ed expression evaluation runs each expression individually against the full collection, + * and finally does a set OR on each expression's results. + * Each evaluation can utilize a binary index to prevent multiple linear array scans. + * + * @param {array} expressionArray - array of expressions + * @returns {Resultset} this resultset for further chain ops. + */ + Resultset.prototype.findOr = function (expressionArray) { + var fr = null, + fri = 0, + frlen = 0, + docset = [], + idxset = [], + idx = 0, + origCount = this.count(); + + // If filter is already initialized, then we query against only those items already in filter. + // This means no index utilization for fields, so hopefully its filtered to a smallish filteredrows. + for (var ei = 0, elen = expressionArray.length; ei < elen; ei++) { + // we need to branch existing query to run each filter separately and combine results + fr = this.branch().find(expressionArray[ei]).filteredrows; + frlen = fr.length; + // if the find operation did not reduce the initial set, then the initial set is the actual result + if (frlen === origCount) { + return this; + } + + // add any document 'hits' + for (fri = 0; fri < frlen; fri++) { + idx = fr[fri]; + if (idxset[idx] === undefined) { + idxset[idx] = true; + docset.push(idx); + } + } + } + + this.filteredrows = docset; + this.filterInitialized = true; + + return this; + }; + Resultset.prototype.$or = Resultset.prototype.findOr; + + /** + * findAnd() - oversee the operation of AND'ed query expressions. + * AND'ed expression evaluation runs each expression progressively against the full collection, + * internally utilizing existing chained resultset functionality. + * Only the first filter can utilize a binary index. + * + * @param {array} expressionArray - array of expressions + * @returns {Resultset} this resultset for further chain ops. + */ + Resultset.prototype.findAnd = function (expressionArray) { + // we have already implementing method chaining in this (our Resultset class) + // so lets just progressively apply user supplied and filters + for (var i = 0, len = expressionArray.length; i < len; i++) { + if (this.count() === 0) { + return this; + } + this.find(expressionArray[i]); + } + return this; + }; + Resultset.prototype.$and = Resultset.prototype.findAnd; + + /** + * Used for querying via a mongo-style query object. 
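+ * Several properties in one query object form an implicit $and, and a bare value is
+ * shorthand for $eq; e.g. (illustrative) { age: 30, name: 'bob' } is evaluated as
+ * { $and: [{ age: 30 }, { name: 'bob' }] } with each value matched via $eq.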
+ * + * @param {object} query - A mongo-style query object used for filtering current results. + * @param {boolean=} firstOnly - (Optional) Used by collection.findOne() + * @returns {Resultset} this resultset for further chain ops. + * @memberof Resultset + * @example + * var over30 = users.chain().find({ age: { $gte: 30 } }).data(); + */ + Resultset.prototype.find = function (query, firstOnly) { + if (this.collection.data.length === 0) { + this.filteredrows = []; + this.filterInitialized = true; + return this; + } + + var queryObject = query || 'getAll', + p, + property, + queryObjectOp, + obj, + operator, + value, + key, + searchByIndex = false, + result = [], + filters = [], + index = null; + + // flag if this was invoked via findOne() + firstOnly = firstOnly || false; + + if (typeof queryObject === 'object') { + for (p in queryObject) { + obj = {}; + obj[p] = queryObject[p]; + filters.push(obj); + + if (hasOwnProperty.call(queryObject, p)) { + property = p; + queryObjectOp = queryObject[p]; + } + } + // if more than one expression in single query object, + // convert implicit $and to explicit $and + if (filters.length > 1) { + return this.find({ '$and': filters }, firstOnly); + } + } + + // apply no filters if they want all + if (!property || queryObject === 'getAll') { + if (firstOnly) { + this.filteredrows = (this.collection.data.length > 0)?[0]: []; + this.filterInitialized = true; + } + + return this; + } + + // injecting $and and $or expression tree evaluation here. + if (property === '$and' || property === '$or') { + this[property](queryObjectOp); + + // for chained find with firstonly, + if (firstOnly && this.filteredrows.length > 1) { + this.filteredrows = this.filteredrows.slice(0, 1); + } + + return this; + } + + // see if query object is in shorthand mode (assuming eq operator) + if (queryObjectOp === null || (typeof queryObjectOp !== 'object' || queryObjectOp instanceof Date)) { + operator = '$eq'; + value = queryObjectOp; + } else if (typeof queryObjectOp === 'object') { + for (key in queryObjectOp) { + if (hasOwnProperty.call(queryObjectOp, key)) { + operator = key; + value = queryObjectOp[key]; + break; + } + } + } else { + throw new Error('Do not know what you want to do.'); + } + + // for regex ops, precompile + if (operator === '$regex') { + if (Array.isArray(value)) { + value = new RegExp(value[0], value[1]); + } else if (!(value instanceof RegExp)) { + value = new RegExp(value); + } + } + + // if user is deep querying the object such as find('name.first': 'odin') + var usingDotNotation = (property.indexOf('.') !== -1); + + // if an index exists for the property being queried against, use it + // for now only enabling where it is the first filter applied and prop is indexed + var doIndexCheck = !usingDotNotation && !this.filterInitialized; + + if (doIndexCheck && this.collection.binaryIndices[property] && indexedOps[operator]) { + // this is where our lazy index rebuilding will take place + // basically we will leave all indexes dirty until we need them + // so here we will rebuild only the index tied to this property + // ensureIndex() will only rebuild if flagged as dirty since we are not passing force=true param + if (this.collection.adaptiveBinaryIndices !== true) { + this.collection.ensureIndex(property); + } + + searchByIndex = true; + index = this.collection.binaryIndices[property]; + } + + // the comparison function + var fun = LokiOps[operator]; + + // "shortcut" for collection data + var t = this.collection.data; + // filter data length + var i = 0, + len = 
0; + + // Query executed differently depending on : + // - whether the property being queried has an index defined + // - if chained, we handle first pass differently for initial filteredrows[] population + // + // For performance reasons, each case has its own if block to minimize in-loop calculations + + var filter, rowIdx = 0; + + // If the filteredrows[] is already initialized, use it + if (this.filterInitialized) { + filter = this.filteredrows; + len = filter.length; + + // currently supporting dot notation for non-indexed conditions only + if (usingDotNotation) { + property = property.split('.'); + for(i=0; i= 30; }.data(); + */ + Resultset.prototype.where = function (fun) { + var viewFunction, + result = []; + + if ('function' === typeof fun) { + viewFunction = fun; + } else { + throw new TypeError('Argument is not a stored view or a function'); + } + try { + // If the filteredrows[] is already initialized, use it + if (this.filterInitialized) { + var j = this.filteredrows.length; + + while (j--) { + if (viewFunction(this.collection.data[this.filteredrows[j]]) === true) { + result.push(this.filteredrows[j]); + } + } + + this.filteredrows = result; + + return this; + } + // otherwise this is initial chained op, work against data, push into filteredrows[] + else { + var k = this.collection.data.length; + + while (k--) { + if (viewFunction(this.collection.data[k]) === true) { + result.push(k); + } + } + + this.filteredrows = result; + this.filterInitialized = true; + + return this; + } + } catch (err) { + throw err; + } + }; + + /** + * count() - returns the number of documents in the resultset. + * + * @returns {number} The number of documents in the resultset. + * @memberof Resultset + * @example + * var over30Count = users.chain().find({ age: { $gte: 30 } }).count(); + */ + Resultset.prototype.count = function () { + if (this.filterInitialized) { + return this.filteredrows.length; + } + return this.collection.count(); + }; + + /** + * Terminates the chain and returns array of filtered documents + * + * @param {object=} options - allows specifying 'forceClones' and 'forceCloneMethod' options. + * @param {boolean} options.forceClones - Allows forcing the return of cloned objects even when + * the collection is not configured for clone object. + * @param {string} options.forceCloneMethod - Allows overriding the default or collection specified cloning method. 
+ * Possible values include 'parse-stringify', 'jquery-extend-deep', 'shallow', 'shallow-assign' + * @param {bool} options.removeMeta - Will force clones and strip $loki and meta properties from documents + * + * @returns {array} Array of documents in the resultset + * @memberof Resultset + * @example + * var resutls = users.chain().find({ age: 34 }).data(); + */ + Resultset.prototype.data = function (options) { + var result = [], + data = this.collection.data, + obj, + len, + i, + method; + + options = options || {}; + + // if user opts to strip meta, then force clones and use 'shallow' if 'force' options are not present + if (options.removeMeta && !options.forceClones) { + options.forceClones = true; + options.forceCloneMethod = options.forceCloneMethod || 'shallow'; + } + + // if collection has delta changes active, then force clones and use 'parse-stringify' for effective change tracking of nested objects + if (!this.collection.disableDeltaChangesApi) { + options.forceClones = true; + options.forceCloneMethod = 'parse-stringify'; + } + + // if this has no filters applied, just return collection.data + if (!this.filterInitialized) { + if (this.filteredrows.length === 0) { + // determine whether we need to clone objects or not + if (this.collection.cloneObjects || options.forceClones) { + len = data.length; + method = options.forceCloneMethod || this.collection.cloneMethod; + + for (i = 0; i < len; i++) { + obj = clone(data[i], method); + if (options.removeMeta) { + delete obj.$loki; + delete obj.meta; + } + result.push(obj); + } + return result; + } + // otherwise we are not cloning so return sliced array with same object references + else { + return data.slice(); + } + } else { + // filteredrows must have been set manually, so use it + this.filterInitialized = true; + } + } + + var fr = this.filteredrows; + len = fr.length; + + if (this.collection.cloneObjects || options.forceClones) { + method = options.forceCloneMethod || this.collection.cloneMethod; + for (i = 0; i < len; i++) { + obj = clone(data[fr[i]], method); + if (options.removeMeta) { + delete obj.$loki; + delete obj.meta; + } + result.push(obj); + } + } else { + for (i = 0; i < len; i++) { + result.push(data[fr[i]]); + } + } + return result; + }; + + /** + * Used to run an update operation on all documents currently in the resultset. + * + * @param {function} updateFunction - User supplied updateFunction(obj) will be executed for each document object. + * @returns {Resultset} this resultset for further chain ops. 
+ * @memberof Resultset + * @example + * users.chain().find({ country: 'de' }).update(function(user) { + * user.phoneFormat = "+49 AAAA BBBBBB"; + * }); + */ + Resultset.prototype.update = function (updateFunction) { + + if (typeof (updateFunction) !== "function") { + throw new TypeError('Argument is not a function'); + } + + // if this has no filters applied, we need to populate filteredrows first + if (!this.filterInitialized && this.filteredrows.length === 0) { + this.filteredrows = this.collection.prepareFullDocIndex(); + } + + var obj, len = this.filteredrows.length, + rcd = this.collection.data; + + // pass in each document object currently in resultset to user supplied updateFunction + for (var idx = 0; idx < len; idx++) { + // if we have cloning option specified or are doing differential delta changes, clone object first + if (this.collection.cloneObjects || !this.collection.disableDeltaChangesApi) { + obj = clone(rcd[this.filteredrows[idx]], this.collection.cloneMethod); + updateFunction(obj); + this.collection.update(obj); + } + else { + // no need to clone, so just perform update on collection data object instance + updateFunction(rcd[this.filteredrows[idx]]); + this.collection.update(rcd[this.filteredrows[idx]]); + } + } + + return this; + }; + + /** + * Removes all document objects which are currently in resultset from collection (as well as resultset) + * + * @returns {Resultset} this (empty) resultset for further chain ops. + * @memberof Resultset + * @example + * // remove users inactive since 1/1/2001 + * users.chain().find({ lastActive: { $lte: new Date("1/1/2001").getTime() } }).remove(); + */ + Resultset.prototype.remove = function () { + + // if this has no filters applied, we need to populate filteredrows first + if (!this.filterInitialized && this.filteredrows.length === 0) { + this.filteredrows = this.collection.prepareFullDocIndex(); + } + + this.collection.removeBatchByPositions(this.filteredrows); + + this.filteredrows = []; + + return this; + }; + + /** + * data transformation via user supplied functions + * + * @param {function} mapFunction - this function accepts a single document for you to transform and return + * @param {function} reduceFunction - this function accepts many (array of map outputs) and returns single value + * @returns {value} The output of your reduceFunction + * @memberof Resultset + * @example + * var db = new loki("order.db"); + * var orders = db.addCollection("orders"); + * orders.insert([{ qty: 4, unitCost: 100.00 }, { qty: 10, unitCost: 999.99 }, { qty: 2, unitCost: 49.99 }]); + * + * function mapfun (obj) { return obj.qty*obj.unitCost }; + * function reducefun(array) { + * var grandTotal=0; + * array.forEach(function(orderTotal) { grandTotal += orderTotal; }); + * return grandTotal; + * } + * var grandOrderTotal = orders.chain().mapReduce(mapfun, reducefun); + * console.log(grandOrderTotal); + */ + Resultset.prototype.mapReduce = function (mapFunction, reduceFunction) { + try { + return reduceFunction(this.data().map(mapFunction)); + } catch (err) { + throw err; + } + }; + + /** + * eqJoin() - Left joining two sets of data. Join keys can be defined or calculated properties + * eqJoin expects the right join key values to be unique. Otherwise left data will be joined on the last joinData object with that key + * @param {Array|Resultset|Collection} joinData - Data array to join to. 
+ * @param {(string|function)} leftJoinKey - Property name in this result set to join on or a function to produce a value to join on + * @param {(string|function)} rightJoinKey - Property name in the joinData to join on or a function to produce a value to join on + * @param {function=} mapFun - (Optional) A function that receives each matching pair and maps them into output objects - function(left,right){return joinedObject} + * @param {object=} dataOptions - options to data() before input to your map function + * @param {bool} dataOptions.removeMeta - allows removing meta before calling mapFun + * @param {boolean} dataOptions.forceClones - forcing the return of cloned objects to your map object + * @param {string} dataOptions.forceCloneMethod - Allows overriding the default or collection specified cloning method. + * @returns {Resultset} A resultset with data in the format [{left: leftObj, right: rightObj}] + * @memberof Resultset + * @example + * var db = new loki('sandbox.db'); + * + * var products = db.addCollection('products'); + * var orders = db.addCollection('orders'); + * + * products.insert({ productId: "100234", name: "flywheel energy storage", unitCost: 19999.99 }); + * products.insert({ productId: "140491", name: "300F super capacitor", unitCost: 129.99 }); + * products.insert({ productId: "271941", name: "fuel cell", unitCost: 3999.99 }); + * products.insert({ productId: "174592", name: "390V 3AH lithium bank", unitCost: 4999.99 }); + * + * orders.insert({ orderDate : new Date("12/1/2017").getTime(), prodId: "174592", qty: 2, customerId: 2 }); + * orders.insert({ orderDate : new Date("4/15/2016").getTime(), prodId: "271941", qty: 1, customerId: 1 }); + * orders.insert({ orderDate : new Date("3/12/2017").getTime(), prodId: "140491", qty: 4, customerId: 4 }); + * orders.insert({ orderDate : new Date("7/31/2017").getTime(), prodId: "100234", qty: 7, customerId: 3 }); + * orders.insert({ orderDate : new Date("8/3/2016").getTime(), prodId: "174592", qty: 3, customerId: 5 }); + * + * var mapfun = function(left, right) { + * return { + * orderId: left.$loki, + * orderDate: new Date(left.orderDate) + '', + * customerId: left.customerId, + * qty: left.qty, + * productId: left.prodId, + * prodName: right.name, + * prodCost: right.unitCost, + * orderTotal: +((right.unitCost * left.qty).toFixed(2)) + * }; + * }; + * + * // join orders with relevant product info via eqJoin + * var orderSummary = orders.chain().eqJoin(products, "prodId", "productId", mapfun).data(); + * + * console.log(orderSummary); + */ + Resultset.prototype.eqJoin = function (joinData, leftJoinKey, rightJoinKey, mapFun, dataOptions) { + + var leftData = [], + leftDataLength, + rightData = [], + rightDataLength, + key, + result = [], + leftKeyisFunction = typeof leftJoinKey === 'function', + rightKeyisFunction = typeof rightJoinKey === 'function', + joinMap = {}; + + //get the left data + leftData = this.data(dataOptions); + leftDataLength = leftData.length; + + //get the right data + if (joinData instanceof Collection) { + rightData = joinData.chain().data(dataOptions); + } else if (joinData instanceof Resultset) { + rightData = joinData.data(dataOptions); + } else if (Array.isArray(joinData)) { + rightData = joinData; + } else { + throw new TypeError('joinData needs to be an array or result set'); + } + rightDataLength = rightData.length; + + //construct a lookup table + + for (var i = 0; i < rightDataLength; i++) { + key = rightKeyisFunction ? 
rightJoinKey(rightData[i]) : rightData[i][rightJoinKey]; + joinMap[key] = rightData[i]; + } + + if (!mapFun) { + mapFun = function (left, right) { + return { + left: left, + right: right + }; + }; + } + + //Run map function over each object in the resultset + for (var j = 0; j < leftDataLength; j++) { + key = leftKeyisFunction ? leftJoinKey(leftData[j]) : leftData[j][leftJoinKey]; + result.push(mapFun(leftData[j], joinMap[key] || {})); + } + + //return return a new resultset with no filters + this.collection = new Collection('joinData'); + this.collection.insert(result); + this.filteredrows = []; + this.filterInitialized = false; + + return this; + }; + + /** + * Applies a map function into a new collection for further chaining. + * @param {function} mapFun - javascript map function + * @param {object=} dataOptions - options to data() before input to your map function + * @param {bool} dataOptions.removeMeta - allows removing meta before calling mapFun + * @param {boolean} dataOptions.forceClones - forcing the return of cloned objects to your map object + * @param {string} dataOptions.forceCloneMethod - Allows overriding the default or collection specified cloning method. + * @memberof Resultset + * @example + * var orders.chain().find({ productId: 32 }).map(function(obj) { + * return { + * orderId: $loki, + * productId: productId, + * quantity: qty + * }; + * }); + */ + Resultset.prototype.map = function (mapFun, dataOptions) { + var data = this.data(dataOptions).map(mapFun); + //return return a new resultset with no filters + this.collection = new Collection('mappedData'); + this.collection.insert(data); + this.filteredrows = []; + this.filterInitialized = false; + + return this; + }; + + /** + * DynamicView class is a versatile 'live' view class which can have filters and sorts applied. + * Collection.addDynamicView(name) instantiates this DynamicView object and notifies it + * whenever documents are add/updated/removed so it can remain up-to-date. (chainable) + * + * @example + * var mydv = mycollection.addDynamicView('test'); // default is non-persistent + * mydv.applyFind({ 'doors' : 4 }); + * mydv.applyWhere(function(obj) { return obj.name === 'Toyota'; }); + * var results = mydv.data(); + * + * @constructor DynamicView + * @implements LokiEventEmitter + * @param {Collection} collection - A reference to the collection to work against + * @param {string} name - The name of this dynamic view + * @param {object=} options - (Optional) Pass in object with 'persistent' and/or 'sortPriority' options. + * @param {boolean} [options.persistent=false] - indicates if view is to main internal results array in 'resultdata' + * @param {string} [options.sortPriority='passive'] - 'passive' (sorts performed on call to data) or 'active' (after updates) + * @param {number} options.minRebuildInterval - minimum rebuild interval (need clarification to docs here) + * @see {@link Collection#addDynamicView} to construct instances of DynamicView + */ + function DynamicView(collection, name, options) { + this.collection = collection; + this.name = name; + this.rebuildPending = false; + this.options = options || {}; + + if (!this.options.hasOwnProperty('persistent')) { + this.options.persistent = false; + } + + // 'persistentSortPriority': + // 'passive' will defer the sort phase until they call data(). (most efficient overall) + // 'active' will sort async whenever next idle. 
(prioritizes read speeds) + if (!this.options.hasOwnProperty('sortPriority')) { + this.options.sortPriority = 'passive'; + } + + if (!this.options.hasOwnProperty('minRebuildInterval')) { + this.options.minRebuildInterval = 1; + } + + this.resultset = new Resultset(collection); + this.resultdata = []; + this.resultsdirty = false; + + this.cachedresultset = null; + + // keep ordered filter pipeline + this.filterPipeline = []; + + // sorting member variables + // we only support one active search, applied using applySort() or applySimpleSort() + this.sortFunction = null; + this.sortCriteria = null; + this.sortCriteriaSimple = null; + this.sortDirty = false; + + // for now just have 1 event for when we finally rebuilt lazy view + // once we refactor transactions, i will tie in certain transactional events + + this.events = { + 'rebuild': [] + }; + } + + DynamicView.prototype = new LokiEventEmitter(); + + + /** + * rematerialize() - internally used immediately after deserialization (loading) + * This will clear out and reapply filterPipeline ops, recreating the view. + * Since where filters do not persist correctly, this method allows + * restoring the view to state where user can re-apply those where filters. + * + * @param {Object=} options - (Optional) allows specification of 'removeWhereFilters' option + * @returns {DynamicView} This dynamic view for further chained ops. + * @memberof DynamicView + * @fires DynamicView.rebuild + */ + DynamicView.prototype.rematerialize = function (options) { + var fpl, + fpi, + idx; + + options = options || {}; + + this.resultdata = []; + this.resultsdirty = true; + this.resultset = new Resultset(this.collection); + + if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) { + this.sortDirty = true; + } + + if (options.hasOwnProperty('removeWhereFilters')) { + // for each view see if it had any where filters applied... since they don't + // serialize those functions lets remove those invalid filters + fpl = this.filterPipeline.length; + fpi = fpl; + while (fpi--) { + if (this.filterPipeline[fpi].type === 'where') { + if (fpi !== this.filterPipeline.length - 1) { + this.filterPipeline[fpi] = this.filterPipeline[this.filterPipeline.length - 1]; + } + + this.filterPipeline.length--; + } + } + } + + // back up old filter pipeline, clear filter pipeline, and reapply pipeline ops + var ofp = this.filterPipeline; + this.filterPipeline = []; + + // now re-apply 'find' filterPipeline ops + fpl = ofp.length; + for (idx = 0; idx < fpl; idx++) { + this.applyFind(ofp[idx].val); + } + + // during creation of unit tests, i will remove this forced refresh and leave lazy + this.data(); + + // emit rebuild event in case user wants to be notified + this.emit('rebuild', this); + + return this; + }; + + /** + * branchResultset() - Makes a copy of the internal resultset for branched queries. + * Unlike this dynamic view, the branched resultset will not be 'live' updated, + * so your branched query should be immediately resolved and not held for future evaluation. + * + * @param {(string|array=)} transform - Optional name of collection transform, or an array of transform steps + * @param {object=} parameters - optional parameters (if optional transform requires them) + * @returns {Resultset} A copy of the internal resultset for branched queries. 
+ * @memberof DynamicView + * @example + * var db = new loki('test'); + * var coll = db.addCollection('mydocs'); + * var dv = coll.addDynamicView('myview'); + * var tx = [ + * { + * type: 'offset', + * value: '[%lktxp]pageStart' + * }, + * { + * type: 'limit', + * value: '[%lktxp]pageSize' + * } + * ]; + * coll.addTransform('viewPaging', tx); + * + * // add some records + * + * var results = dv.branchResultset('viewPaging', { pageStart: 10, pageSize: 10 }).data(); + */ + DynamicView.prototype.branchResultset = function (transform, parameters) { + var rs = this.resultset.branch(); + + if (typeof transform === 'undefined') { + return rs; + } + + return rs.transform(transform, parameters); + }; + + /** + * toJSON() - Override of toJSON to avoid circular references + * + */ + DynamicView.prototype.toJSON = function () { + var copy = new DynamicView(this.collection, this.name, this.options); + + copy.resultset = this.resultset; + copy.resultdata = []; // let's not save data (copy) to minimize size + copy.resultsdirty = true; + copy.filterPipeline = this.filterPipeline; + copy.sortFunction = this.sortFunction; + copy.sortCriteria = this.sortCriteria; + copy.sortCriteriaSimple = this.sortCriteriaSimple || null; + copy.sortDirty = this.sortDirty; + + // avoid circular reference, reapply in db.loadJSON() + copy.collection = null; + + return copy; + }; + + /** + * removeFilters() - Used to clear pipeline and reset dynamic view to initial state. + * Existing options should be retained. + * @param {object=} options - configure removeFilter behavior + * @param {boolean=} options.queueSortPhase - (default: false) if true we will async rebuild view (maybe set default to true in future?) + * @memberof DynamicView + */ + DynamicView.prototype.removeFilters = function (options) { + options = options || {}; + + this.rebuildPending = false; + this.resultset.reset(); + this.resultdata = []; + this.resultsdirty = true; + + this.cachedresultset = null; + + // keep ordered filter pipeline + this.filterPipeline = []; + + // sorting member variables + // we only support one active search, applied using applySort() or applySimpleSort() + this.sortFunction = null; + this.sortCriteria = null; + this.sortCriteriaSimple = null; + this.sortDirty = false; + + if (options.queueSortPhase === true) { + this.queueSortPhase(); + } + }; + + /** + * applySort() - Used to apply a sort to the dynamic view + * @example + * dv.applySort(function(obj1, obj2) { + * if (obj1.name === obj2.name) return 0; + * if (obj1.name > obj2.name) return 1; + * if (obj1.name < obj2.name) return -1; + * }); + * + * @param {function} comparefun - a javascript compare function used for sorting + * @returns {DynamicView} this DynamicView object, for further chain ops. + * @memberof DynamicView + */ + DynamicView.prototype.applySort = function (comparefun) { + this.sortFunction = comparefun; + this.sortCriteria = null; + this.sortCriteriaSimple = null; + + this.queueSortPhase(); + + return this; + }; + + /** + * applySimpleSort() - Used to specify a property used for view translation. + * @example + * dv.applySimpleSort("name"); + * + * @param {string} propname - Name of property by which to sort. + * @param {object|boolean=} options - boolean for sort descending or options object + * @param {boolean} [options.desc=false] - whether we should sort descending. + * @param {boolean} [options.disableIndexIntersect=false] - whether we should explicity not use array intersection. 
+ * @param {boolean} [options.forceIndexIntersect=false] - force array intersection (if binary index exists). + * @param {boolean} [options.useJavascriptSorting=false] - whether results are sorted via basic javascript sort. + * @returns {DynamicView} this DynamicView object, for further chain ops. + * @memberof DynamicView + */ + DynamicView.prototype.applySimpleSort = function (propname, options) { + this.sortCriteriaSimple = { propname: propname, options: options || false }; + this.sortCriteria = null; + this.sortFunction = null; + + this.queueSortPhase(); + + return this; + }; + + /** + * applySortCriteria() - Allows sorting a resultset based on multiple columns. + * @example + * // to sort by age and then name (both ascending) + * dv.applySortCriteria(['age', 'name']); + * // to sort by age (ascending) and then by name (descending) + * dv.applySortCriteria(['age', ['name', true]); + * // to sort by age (descending) and then by name (descending) + * dv.applySortCriteria(['age', true], ['name', true]); + * + * @param {array} properties - array of property names or subarray of [propertyname, isdesc] used evaluate sort order + * @returns {DynamicView} Reference to this DynamicView, sorted, for future chain operations. + * @memberof DynamicView + */ + DynamicView.prototype.applySortCriteria = function (criteria) { + this.sortCriteria = criteria; + this.sortCriteriaSimple = null; + this.sortFunction = null; + + this.queueSortPhase(); + + return this; + }; + + /** + * startTransaction() - marks the beginning of a transaction. + * + * @returns {DynamicView} this DynamicView object, for further chain ops. + */ + DynamicView.prototype.startTransaction = function () { + this.cachedresultset = this.resultset.copy(); + + return this; + }; + + /** + * commit() - commits a transaction. + * + * @returns {DynamicView} this DynamicView object, for further chain ops. + */ + DynamicView.prototype.commit = function () { + this.cachedresultset = null; + + return this; + }; + + /** + * rollback() - rolls back a transaction. + * + * @returns {DynamicView} this DynamicView object, for further chain ops. + */ + DynamicView.prototype.rollback = function () { + this.resultset = this.cachedresultset; + + if (this.options.persistent) { + // for now just rebuild the persistent dynamic view data in this worst case scenario + // (a persistent view utilizing transactions which get rolled back), we already know the filter so not too bad. + this.resultdata = this.resultset.data(); + + this.emit('rebuild', this); + } + + return this; + }; + + + /** + * Implementation detail. + * _indexOfFilterWithId() - Find the index of a filter in the pipeline, by that filter's ID. + * + * @param {(string|number)} uid - The unique ID of the filter. + * @returns {number}: index of the referenced filter in the pipeline; -1 if not found. + */ + DynamicView.prototype._indexOfFilterWithId = function (uid) { + if (typeof uid === 'string' || typeof uid === 'number') { + for (var idx = 0, len = this.filterPipeline.length; idx < len; idx += 1) { + if (uid === this.filterPipeline[idx].uid) { + return idx; + } + } + } + return -1; + }; + + /** + * Implementation detail. + * _addFilter() - Add the filter object to the end of view's filter pipeline and apply the filter to the resultset. + * + * @param {object} filter - The filter object. Refer to applyFilter() for extra details. 
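+ * e.g. (illustrative) { type: 'find', val: { age: { $gte: 30 } }, uid: 'ageFilter' }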
+ */ + DynamicView.prototype._addFilter = function (filter) { + this.filterPipeline.push(filter); + this.resultset[filter.type](filter.val); + }; + + /** + * reapplyFilters() - Reapply all the filters in the current pipeline. + * + * @returns {DynamicView} this DynamicView object, for further chain ops. + */ + DynamicView.prototype.reapplyFilters = function () { + this.resultset.reset(); + + this.cachedresultset = null; + if (this.options.persistent) { + this.resultdata = []; + this.resultsdirty = true; + } + + var filters = this.filterPipeline; + this.filterPipeline = []; + + for (var idx = 0, len = filters.length; idx < len; idx += 1) { + this._addFilter(filters[idx]); + } + + if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) { + this.queueSortPhase(); + } else { + this.queueRebuildEvent(); + } + + return this; + }; + + /** + * applyFilter() - Adds or updates a filter in the DynamicView filter pipeline + * + * @param {object} filter - A filter object to add to the pipeline. + * The object is in the format { 'type': filter_type, 'val', filter_param, 'uid', optional_filter_id } + * @returns {DynamicView} this DynamicView object, for further chain ops. + * @memberof DynamicView + */ + DynamicView.prototype.applyFilter = function (filter) { + var idx = this._indexOfFilterWithId(filter.uid); + if (idx >= 0) { + this.filterPipeline[idx] = filter; + return this.reapplyFilters(); + } + + this.cachedresultset = null; + if (this.options.persistent) { + this.resultdata = []; + this.resultsdirty = true; + } + + this._addFilter(filter); + + if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) { + this.queueSortPhase(); + } else { + this.queueRebuildEvent(); + } + + return this; + }; + + /** + * applyFind() - Adds or updates a mongo-style query option in the DynamicView filter pipeline + * + * @param {object} query - A mongo-style query object to apply to pipeline + * @param {(string|number)=} uid - Optional: The unique ID of this filter, to reference it in the future. + * @returns {DynamicView} this DynamicView object, for further chain ops. + * @memberof DynamicView + */ + DynamicView.prototype.applyFind = function (query, uid) { + this.applyFilter({ + type: 'find', + val: query, + uid: uid + }); + return this; + }; + + /** + * applyWhere() - Adds or updates a javascript filter function in the DynamicView filter pipeline + * + * @param {function} fun - A javascript filter function to apply to pipeline + * @param {(string|number)=} uid - Optional: The unique ID of this filter, to reference it in the future. + * @returns {DynamicView} this DynamicView object, for further chain ops. + * @memberof DynamicView + */ + DynamicView.prototype.applyWhere = function (fun, uid) { + this.applyFilter({ + type: 'where', + val: fun, + uid: uid + }); + return this; + }; + + /** + * removeFilter() - Remove the specified filter from the DynamicView filter pipeline + * + * @param {(string|number)} uid - The unique ID of the filter to be removed. + * @returns {DynamicView} this DynamicView object, for further chain ops. + * @memberof DynamicView + */ + DynamicView.prototype.removeFilter = function (uid) { + var idx = this._indexOfFilterWithId(uid); + if (idx < 0) { + throw new Error("Dynamic view does not contain a filter with ID: " + uid); + } + + this.filterPipeline.splice(idx, 1); + this.reapplyFilters(); + return this; + }; + + /** + * count() - returns the number of documents representing the current DynamicView contents. 
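+   * @example
+   * // added usage sketch (not upstream docs; collection/view names are hypothetical)
+   * var dv = coll.addDynamicView('under30');
+   * dv.applyFind({ age: { '$lt': 30 } });
+   * var n = dv.count(); // number of docs currently passing the view's filters
+   *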
+ * + * @returns {number} The number of documents representing the current DynamicView contents. + * @memberof DynamicView + */ + DynamicView.prototype.count = function () { + // in order to be accurate we will pay the minimum cost (and not alter dv state management) + // recurring resultset data resolutions should know internally its already up to date. + // for persistent data this will not update resultdata nor fire rebuild event. + if (this.resultsdirty) { + this.resultdata = this.resultset.data(); + } + + return this.resultset.count(); + }; + + /** + * data() - resolves and pending filtering and sorting, then returns document array as result. + * + * @param {object=} options - optional parameters to pass to resultset.data() if non-persistent + * @param {boolean} options.forceClones - Allows forcing the return of cloned objects even when + * the collection is not configured for clone object. + * @param {string} options.forceCloneMethod - Allows overriding the default or collection specified cloning method. + * Possible values include 'parse-stringify', 'jquery-extend-deep', 'shallow', 'shallow-assign' + * @param {bool} options.removeMeta - Will force clones and strip $loki and meta properties from documents + * @returns {array} An array of documents representing the current DynamicView contents. + * @memberof DynamicView + */ + DynamicView.prototype.data = function (options) { + // using final sort phase as 'catch all' for a few use cases which require full rebuild + if (this.sortDirty || this.resultsdirty) { + this.performSortPhase({ + suppressRebuildEvent: true + }); + } + return (this.options.persistent) ? (this.resultdata) : (this.resultset.data(options)); + }; + + /** + * queueRebuildEvent() - When the view is not sorted we may still wish to be notified of rebuild events. + * This event will throttle and queue a single rebuild event when batches of updates affect the view. + */ + DynamicView.prototype.queueRebuildEvent = function () { + if (this.rebuildPending) { + return; + } + this.rebuildPending = true; + + var self = this; + setTimeout(function () { + if (self.rebuildPending) { + self.rebuildPending = false; + self.emit('rebuild', self); + } + }, this.options.minRebuildInterval); + }; + + /** + * queueSortPhase : If the view is sorted we will throttle sorting to either : + * (1) passive - when the user calls data(), or + * (2) active - once they stop updating and yield js thread control + */ + DynamicView.prototype.queueSortPhase = function () { + // already queued? exit without queuing again + if (this.sortDirty) { + return; + } + this.sortDirty = true; + + var self = this; + if (this.options.sortPriority === "active") { + // active sorting... once they are done and yield js thread, run async performSortPhase() + setTimeout(function () { + self.performSortPhase(); + }, this.options.minRebuildInterval); + } else { + // must be passive sorting... since not calling performSortPhase (until data call), lets use queueRebuildEvent to + // potentially notify user that data has changed. 
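+        // Added explanatory sketch (comment only, not upstream code): when
+        // sortPriority is "passive" the sort is deferred until data() is called:
+        //   dv.applySimpleSort('age');
+        //   coll.insert({ age: 1 });  // marks the sort dirty, no sort runs yet
+        //   dv.data();                // performSortPhase() resolves the sort here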
+ this.queueRebuildEvent(); + } + }; + + /** + * performSortPhase() - invoked synchronously or asynchronously to perform final sort phase (if needed) + * + */ + DynamicView.prototype.performSortPhase = function (options) { + // async call to this may have been pre-empted by synchronous call to data before async could fire + if (!this.sortDirty && !this.resultsdirty) { + return; + } + + options = options || {}; + + if (this.sortDirty) { + if (this.sortFunction) { + this.resultset.sort(this.sortFunction); + } else if (this.sortCriteria) { + this.resultset.compoundsort(this.sortCriteria); + } else if (this.sortCriteriaSimple) { + this.resultset.simplesort(this.sortCriteriaSimple.propname, this.sortCriteriaSimple.options); + } + + this.sortDirty = false; + } + + if (this.options.persistent) { + // persistent view, rebuild local resultdata array + this.resultdata = this.resultset.data(); + this.resultsdirty = false; + } + + if (!options.suppressRebuildEvent) { + this.emit('rebuild', this); + } + }; + + /** + * evaluateDocument() - internal method for (re)evaluating document inclusion. + * Called by : collection.insert() and collection.update(). + * + * @param {int} objIndex - index of document to (re)run through filter pipeline. + * @param {bool} isNew - true if the document was just added to the collection. + */ + DynamicView.prototype.evaluateDocument = function (objIndex, isNew) { + // if no filter applied yet, the result 'set' should remain 'everything' + if (!this.resultset.filterInitialized) { + if (this.options.persistent) { + this.resultdata = this.resultset.data(); + } + // need to re-sort to sort new document + if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) { + this.queueSortPhase(); + } else { + this.queueRebuildEvent(); + } + return; + } + + var ofr = this.resultset.filteredrows; + var oldPos = (isNew) ? (-1) : (ofr.indexOf(+objIndex)); + var oldlen = ofr.length; + + // creating a 1-element resultset to run filter chain ops on to see if that doc passes filters; + // mostly efficient algorithm, slight stack overhead price (this function is called on inserts and updates) + var evalResultset = new Resultset(this.collection); + evalResultset.filteredrows = [objIndex]; + evalResultset.filterInitialized = true; + var filter; + for (var idx = 0, len = this.filterPipeline.length; idx < len; idx++) { + filter = this.filterPipeline[idx]; + evalResultset[filter.type](filter.val); + } + + // not a true position, but -1 if not pass our filter(s), 0 if passed filter(s) + var newPos = (evalResultset.filteredrows.length === 0) ? -1 : 0; + + // wasn't in old, shouldn't be now... do nothing + if (oldPos === -1 && newPos === -1) return; + + // wasn't in resultset, should be now... add + if (oldPos === -1 && newPos !== -1) { + ofr.push(objIndex); + + if (this.options.persistent) { + this.resultdata.push(this.collection.data[objIndex]); + } + + // need to re-sort to sort new document + if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) { + this.queueSortPhase(); + } else { + this.queueRebuildEvent(); + } + + return; + } + + // was in resultset, shouldn't be now... 
delete
+    if (oldPos !== -1 && newPos === -1) {
+      if (oldPos < oldlen - 1) {
+        ofr.splice(oldPos, 1);
+
+        if (this.options.persistent) {
+          this.resultdata.splice(oldPos, 1);
+        }
+      } else {
+        ofr.length = oldlen - 1;
+
+        if (this.options.persistent) {
+          this.resultdata.length = oldlen - 1;
+        }
+      }
+
+      // in case changes to data altered a sort column
+      if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
+        this.queueSortPhase();
+      } else {
+        this.queueRebuildEvent();
+      }
+
+      return;
+    }
+
+    // was in resultset, should still be now... (update persistent only?)
+    if (oldPos !== -1 && newPos !== -1) {
+      if (this.options.persistent) {
+        // in case document changed, replace persistent view data with the latest collection.data document
+        this.resultdata[oldPos] = this.collection.data[objIndex];
+      }
+
+      // in case changes to data altered a sort column
+      if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
+        this.queueSortPhase();
+      } else {
+        this.queueRebuildEvent();
+      }
+
+      return;
+    }
+  };
+
+  /**
+   * removeDocument() - internal function called on collection.delete()
+   * @param {number|number[]} objIndex - index of document to (re)run through filter pipeline.
+   */
+  DynamicView.prototype.removeDocument = function (objIndex) {
+    var idx, rmidx, rmlen, rxo = {}, fxo = {};
+    var adjels = [];
+    var drs = this.resultset;
+    var fr = this.resultset.filteredrows;
+    var frlen = fr.length;
+
+    // if no filter applied yet, the result 'set' should remain 'everything'
+    if (!this.resultset.filterInitialized) {
+      if (this.options.persistent) {
+        this.resultdata = this.resultset.data();
+      }
+      // in case changes to data altered a sort column
+      if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) {
+        this.queueSortPhase();
+      } else {
+        this.queueRebuildEvent();
+      }
+      return;
+    }
+
+    // if passed single index, wrap in array
+    if (!Array.isArray(objIndex)) {
+      objIndex = [objIndex];
+    }
+
+    rmlen = objIndex.length;
+    // create intersection object of data indices to remove
+    for (rmidx = 0; rmidx < rmlen; rmidx++) {
+      rxo[objIndex[rmidx]] = true;
+    }
+
+    // pivot the removed data positions into filteredrows positions and dump in hashobject
+    // (this span was truncated in the diff text and has been reconstructed from the surrounding logic)
+    for (idx = 0; idx < frlen; idx++) {
+      if (rxo[fr[idx]]) {
+        fxo[idx] = true;
+      }
+    }
+
+    // if any of the removed documents were in our filteredrows...
+    if (Object.keys(fxo).length > 0) {
+      // remove them from filtered rows
+      this.resultset.filteredrows = this.resultset.filteredrows.filter(function(di, idx) { return !fxo[idx]; });
+      // if persistent...
+ if (this.options.persistent) { + // remove from resultdata + this.resultdata = this.resultdata.filter(function(obj, idx) { return !fxo[idx]; }); + } + + // and queue sorts + if (this.sortFunction || this.sortCriteria || this.sortCriteriaSimple) { + this.queueSortPhase(); + } else { + this.queueRebuildEvent(); + } + } + + // to remove holes, we need to 'shift down' indices, this filter function finds number of positions to shift + var filt = function(idx) { return function(di) { return di < drs.filteredrows[idx]; }; }; + + frlen = drs.filteredrows.length; + for (idx = 0; idx < frlen; idx++) { + // grab subset of removed elements where data index is less than current filtered row data index; + // use this to determine how many positions iterated remaining data index needs to be 'shifted down' + adjels = objIndex.filter(filt(idx)); + drs.filteredrows[idx] -= adjels.length; + } + }; + + /** + * mapReduce() - data transformation via user supplied functions + * + * @param {function} mapFunction - this function accepts a single document for you to transform and return + * @param {function} reduceFunction - this function accepts many (array of map outputs) and returns single value + * @returns The output of your reduceFunction + * @memberof DynamicView + */ + DynamicView.prototype.mapReduce = function (mapFunction, reduceFunction) { + try { + return reduceFunction(this.data().map(mapFunction)); + } catch (err) { + throw err; + } + }; + + + /** + * Collection class that handles documents of same type + * @constructor Collection + * @implements LokiEventEmitter + * @param {string} name - collection name + * @param {(array|object)=} options - (optional) array of property names to be indicized OR a configuration object + * @param {array=} [options.unique=[]] - array of property names to define unique constraints for + * @param {array=} [options.exact=[]] - array of property names to define exact constraints for + * @param {array=} [options.indices=[]] - array property names to define binary indexes for + * @param {boolean} [options.adaptiveBinaryIndices=true] - collection indices will be actively rebuilt rather than lazily + * @param {boolean} [options.asyncListeners=false] - whether listeners are invoked asynchronously + * @param {boolean} [options.disableMeta=false] - set to true to disable meta property on documents + * @param {boolean} [options.disableChangesApi=true] - set to false to enable Changes API + * @param {boolean} [options.disableDeltaChangesApi=true] - set to false to enable Delta Changes API (requires Changes API, forces cloning) + * @param {boolean} [options.autoupdate=false] - use Object.observe to update objects automatically + * @param {boolean} [options.clone=false] - specify whether inserts and queries clone to/from user + * @param {boolean} [options.serializableIndices=true[]] - converts date values on binary indexed properties to epoch time + * @param {string} [options.cloneMethod='parse-stringify'] - 'parse-stringify', 'jquery-extend-deep', 'shallow', 'shallow-assign' + * @param {int=} options.ttl - age of document (in ms.) before document is considered aged/stale. + * @param {int=} options.ttlInterval - time interval for clearing out 'aged' documents; not set by default. 
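+   * @example
+   * // added sketch (not upstream docs; collection/field names are hypothetical)
+   * var db = new loki('example.db');
+   * var users = db.addCollection('users', {
+   *   unique: ['email'],
+   *   indices: ['age'],
+   *   clone: true
+   * });
+   * users.insert({ email: 'ann@example.com', age: 30, name: 'ann' });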
+ * @see {@link Loki#addCollection} for normal creation of collections + */ + function Collection(name, options) { + // the name of the collection + + this.name = name; + // the data held by the collection + this.data = []; + this.idIndex = []; // index of id + this.binaryIndices = {}; // user defined indexes + this.constraints = { + unique: {}, + exact: {} + }; + + // unique contraints contain duplicate object references, so they are not persisted. + // we will keep track of properties which have unique contraint applied here, and regenerate on load + this.uniqueNames = []; + + // transforms will be used to store frequently used query chains as a series of steps + // which itself can be stored along with the database. + this.transforms = {}; + + // the object type of the collection + this.objType = name; + + // in autosave scenarios we will use collection level dirty flags to determine whether save is needed. + // currently, if any collection is dirty we will autosave the whole database if autosave is configured. + // defaulting to true since this is called from addCollection and adding a collection should trigger save + this.dirty = true; + + // private holders for cached data + this.cachedIndex = null; + this.cachedBinaryIndex = null; + this.cachedData = null; + var self = this; + + /* OPTIONS */ + options = options || {}; + + // exact match and unique constraints + if (options.hasOwnProperty('unique')) { + if (!Array.isArray(options.unique)) { + options.unique = [options.unique]; + } + options.unique.forEach(function (prop) { + self.uniqueNames.push(prop); // used to regenerate on subsequent database loads + self.constraints.unique[prop] = new UniqueIndex(prop); + }); + } + + if (options.hasOwnProperty('exact')) { + options.exact.forEach(function (prop) { + self.constraints.exact[prop] = new ExactIndex(prop); + }); + } + + // if set to true we will optimally keep indices 'fresh' during insert/update/remove ops (never dirty/never needs rebuild) + // if you frequently intersperse insert/update/remove ops between find ops this will likely be significantly faster option. + this.adaptiveBinaryIndices = options.hasOwnProperty('adaptiveBinaryIndices') ? options.adaptiveBinaryIndices : true; + + // is collection transactional + this.transactional = options.hasOwnProperty('transactional') ? options.transactional : false; + + // options to clone objects when inserting them + this.cloneObjects = options.hasOwnProperty('clone') ? options.clone : false; + + // default clone method (if enabled) is parse-stringify + this.cloneMethod = options.hasOwnProperty('cloneMethod') ? options.cloneMethod : "parse-stringify"; + + // option to make event listeners async, default is sync + this.asyncListeners = options.hasOwnProperty('asyncListeners') ? options.asyncListeners : false; + + // if set to true we will not maintain a meta property for a document + this.disableMeta = options.hasOwnProperty('disableMeta') ? options.disableMeta : false; + + // disable track changes + this.disableChangesApi = options.hasOwnProperty('disableChangesApi') ? options.disableChangesApi : true; + + // disable delta update object style on changes + this.disableDeltaChangesApi = options.hasOwnProperty('disableDeltaChangesApi') ? options.disableDeltaChangesApi : true; + if (this.disableChangesApi) { this.disableDeltaChangesApi = true; } + + // option to observe objects and update them automatically, ignored if Object.observe is not supported + this.autoupdate = options.hasOwnProperty('autoupdate') ? 
options.autoupdate : false; + + // by default, if you insert a document into a collection with binary indices, if those indexed properties contain + // a DateTime we will convert to epoch time format so that (across serializations) its value position will be the + // same 'after' serialization as it was 'before'. + this.serializableIndices = options.hasOwnProperty('serializableIndices') ? options.serializableIndices : true; + + //option to activate a cleaner daemon - clears "aged" documents at set intervals. + this.ttl = { + age: null, + ttlInterval: null, + daemon: null + }; + this.setTTL(options.ttl || -1, options.ttlInterval); + + // currentMaxId - change manually at your own peril! + this.maxId = 0; + + this.DynamicViews = []; + + // events + this.events = { + 'insert': [], + 'update': [], + 'pre-insert': [], + 'pre-update': [], + 'close': [], + 'flushbuffer': [], + 'error': [], + 'delete': [], + 'warning': [] + }; + + // changes are tracked by collection and aggregated by the db + this.changes = []; + + // initialize the id index + this.ensureId(); + var indices = []; + // initialize optional user-supplied indices array ['age', 'lname', 'zip'] + if (options && options.indices) { + if (Object.prototype.toString.call(options.indices) === '[object Array]') { + indices = options.indices; + } else if (typeof options.indices === 'string') { + indices = [options.indices]; + } else { + throw new TypeError('Indices needs to be a string or an array of strings'); + } + } + + for (var idx = 0; idx < indices.length; idx++) { + this.ensureIndex(indices[idx]); + } + + function observerCallback(changes) { + + var changedObjects = typeof Set === 'function' ? new Set() : []; + + if (!changedObjects.add) + changedObjects.add = function (object) { + if (this.indexOf(object) === -1) + this.push(object); + return this; + }; + + changes.forEach(function (change) { + changedObjects.add(change.object); + }); + + changedObjects.forEach(function (object) { + if (!hasOwnProperty.call(object, '$loki')) + return self.removeAutoUpdateObserver(object); + try { + self.update(object); + } catch (err) {} + }); + } + + this.observerCallback = observerCallback; + + //Compare changed object (which is a forced clone) with existing object and return the delta + function getChangeDelta(obj, old) { + if (old) { + return getObjectDelta(old, obj); + } + else { + return JSON.parse(JSON.stringify(obj)); + } + } + + this.getChangeDelta = getChangeDelta; + + function getObjectDelta(oldObject, newObject) { + var propertyNames = newObject !== null && typeof newObject === 'object' ? Object.keys(newObject) : null; + if (propertyNames && propertyNames.length && ['string', 'boolean', 'number'].indexOf(typeof(newObject)) < 0) { + var delta = {}; + for (var i = 0; i < propertyNames.length; i++) { + var propertyName = propertyNames[i]; + if (newObject.hasOwnProperty(propertyName)) { + if (!oldObject.hasOwnProperty(propertyName) || self.uniqueNames.indexOf(propertyName) >= 0 || propertyName == '$loki' || propertyName == 'meta') { + delta[propertyName] = newObject[propertyName]; + } + else { + var propertyDelta = getObjectDelta(oldObject[propertyName], newObject[propertyName]); + if (typeof propertyDelta !== "undefined" && propertyDelta != {}) { + delta[propertyName] = propertyDelta; + } + } + } + } + return Object.keys(delta).length === 0 ? undefined : delta; + } + else { + return oldObject === newObject ? 
undefined : newObject; + } + } + + this.getObjectDelta = getObjectDelta; + + // clear all the changes + function flushChanges() { + self.changes = []; + } + + this.getChanges = function () { + return self.changes; + }; + + this.flushChanges = flushChanges; + + this.setChangesApi = function (enabled) { + self.disableChangesApi = !enabled; + if (!enabled) { self.disableDeltaChangesApi = false; } + }; + + this.on('delete', function deleteCallback(obj) { + if (!self.disableChangesApi) { + self.createChange(self.name, 'R', obj); + } + }); + + this.on('warning', function (warning) { + self.console.warn(warning); + }); + // for de-serialization purposes + flushChanges(); + } + + Collection.prototype = new LokiEventEmitter(); + + /* + * For ChangeAPI default to clone entire object, for delta changes create object with only differences (+ $loki and meta) + */ + Collection.prototype.createChange = function(name, op, obj, old) { + this.changes.push({ + name: name, + operation: op, + obj: op == 'U' && !this.disableDeltaChangesApi ? this.getChangeDelta(obj, old) : JSON.parse(JSON.stringify(obj)) + }); + }; + + Collection.prototype.insertMeta = function(obj) { + var len, idx; + + if (this.disableMeta || !obj) { + return; + } + + // if batch insert + if (Array.isArray(obj)) { + len = obj.length; + + for(idx=0; idx 0) { + * results.forEach(function(name) { + * console.log('problem encountered with index : ' + name); + * }); + * } + */ + Collection.prototype.checkAllIndexes = function (options) { + var key, bIndices = this.binaryIndices; + var results = [], result; + + for (key in bIndices) { + if (hasOwnProperty.call(bIndices, key)) { + result = this.checkIndex(key, options); + if (!result) { + results.push(key); + } + } + } + + return results; + }; + + /** + * Perform checks to determine validity/consistency of a binary index + * @param {string} property - name of the binary-indexed property to check + * @param {object=} options - optional configuration object + * @param {boolean} [options.randomSampling=false] - whether (faster) random sampling should be used + * @param {number} [options.randomSamplingFactor=0.10] - percentage of total rows to randomly sample + * @param {boolean} [options.repair=false] - whether to fix problems if they are encountered + * @returns {boolean} whether the index was found to be valid (before optional correcting). 
+ * @memberof Collection + * @example + * // full test + * var valid = coll.checkIndex('name'); + * // full test with repair (if issues found) + * valid = coll.checkIndex('name', { repair: true }); + * // random sampling (default is 10% of total document count) + * valid = coll.checkIndex('name', { randomSampling: true }); + * // random sampling (sample 20% of total document count) + * valid = coll.checkIndex('name', { randomSampling: true, randomSamplingFactor: 0.20 }); + * // random sampling (implied boolean) + * valid = coll.checkIndex('name', { randomSamplingFactor: 0.20 }); + * // random sampling with repair (if issues found) + * valid = coll.checkIndex('name', { repair: true, randomSampling: true }); + */ + Collection.prototype.checkIndex = function (property, options) { + options = options || {}; + // if 'randomSamplingFactor' specified but not 'randomSampling', assume true + if (options.randomSamplingFactor && options.randomSampling !== false) { + options.randomSampling = true; + } + options.randomSamplingFactor = options.randomSamplingFactor || 0.1; + if (options.randomSamplingFactor < 0 || options.randomSamplingFactor > 1) { + options.randomSamplingFactor = 0.1; + } + + var valid=true, idx, iter, pos, len, biv; + + // make sure we are passed a valid binary index name + if (!this.binaryIndices.hasOwnProperty(property)) { + throw new Error("called checkIndex on property without an index: " + property); + } + + // if lazy indexing, rebuild only if flagged as dirty + if (!this.adaptiveBinaryIndices) { + this.ensureIndex(property); + } + + biv = this.binaryIndices[property].values; + len = biv.length; + + // if the index has an incorrect number of values + if (len !== this.data.length) { + if (options.repair) { + this.ensureIndex(property, true); + } + return false; + } + + if (len === 0) { + return true; + } + + if (len === 1) { + valid = (biv[0] === 0); + } + else { + if (options.randomSampling) { + // validate first and last + if (!LokiOps.$lte(this.data[biv[0]][property], this.data[biv[1]][property])) { + valid=false; + } + if (!LokiOps.$lte(this.data[biv[len-2]][property], this.data[biv[len-1]][property])) { + valid=false; + } + + // if first and last positions are sorted correctly with their nearest neighbor, + // continue onto random sampling phase... + if (valid) { + // # random samplings = total count * sampling factor + iter = Math.floor((len-1) * options.randomSamplingFactor); + + // for each random sampling, validate that the binary index is sequenced properly + // with next higher value. + for(idx=0; idx 0; + + if (adaptiveBatchOverride) { + this.adaptiveBinaryIndices = false; + } + + try { + for (k=0; k < len; k += 1) { + this.update(doc[k]); + } + } + finally { + if (adaptiveBatchOverride) { + this.ensureAllIndexes(); + this.adaptiveBinaryIndices = true; + } + } + + return; + } + + // verify object is a properly formed document + if (!hasOwnProperty.call(doc, '$loki')) { + throw new Error('Trying to update unsynced document. Please save the document first by using insert() or addMany()'); + } + try { + this.startTransaction(); + var arr = this.get(doc.$loki, true), + oldInternal, // ref to existing obj + newInternal, // ref to new internal obj + position, + self = this; + + if (!arr) { + throw new Error('Trying to update a document not in collection.'); + } + + oldInternal = arr[0]; // -internal- obj ref + position = arr[1]; // position in data array + + // if configured to clone, do so now... 
otherwise just use same obj reference + newInternal = this.cloneObjects || !this.disableDeltaChangesApi ? clone(doc, this.cloneMethod) : doc; + + this.emit('pre-update', doc); + + Object.keys(this.constraints.unique).forEach(function (key) { + self.constraints.unique[key].update(oldInternal, newInternal); + }); + + // operate the update + this.data[position] = newInternal; + + if (newInternal !== doc) { + this.addAutoUpdateObserver(doc); + } + + // now that we can efficiently determine the data[] position of newly added document, + // submit it for all registered DynamicViews to evaluate for inclusion/exclusion + for (var idx = 0; idx < this.DynamicViews.length; idx++) { + this.DynamicViews[idx].evaluateDocument(position, false); + } + + var key; + if (this.adaptiveBinaryIndices) { + // for each binary index defined in collection, immediately update rather than flag for lazy rebuild + var bIndices = this.binaryIndices; + for (key in bIndices) { + this.adaptiveBinaryIndexUpdate(position, key); + } + } + else { + this.flagBinaryIndexesDirty(); + } + + this.idIndex[position] = newInternal.$loki; + //this.flagBinaryIndexesDirty(); + + this.commit(); + this.dirty = true; // for autosave scenarios + + // update meta and store changes if ChangesAPI is enabled + if (this.disableChangesApi) { + this.updateMeta(newInternal, null); + } + else { + this.updateMetaWithChange(newInternal, oldInternal); + } + + var returnObj; + + // if cloning is enabled, emit 'update' event and return with clone of new object + if (this.cloneObjects) { + returnObj = clone(newInternal, this.cloneMethod); + } + else { + returnObj = newInternal; + } + + this.emit('update', returnObj, oldInternal); + return returnObj; + } catch (err) { + this.rollback(); + this.console.error(err.message); + this.emit('error', err); + throw (err); // re-throw error so user does not think it succeeded + } + }; + + /** + * Add object to collection + */ + Collection.prototype.add = function (obj) { + // if parameter isn't object exit with throw + if ('object' !== typeof obj) { + throw new TypeError('Object being added needs to be an object'); + } + // if object you are adding already has id column it is either already in the collection + // or the object is carrying its own 'id' property. If it also has a meta property, + // then this is already in collection so throw error, otherwise rename to originalId and continue adding. 
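+    // Added illustrative comment (sketch, not upstream code): a typical round
+    // trip that avoids this error path looks like
+    //   var doc = users.insert({ name: 'ann' }); // assigns doc.$loki and doc.meta
+    //   doc.name = 'anne';
+    //   users.update(doc);                       // re-adding the same doc would throw below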
+ if (typeof (obj.$loki) !== 'undefined') { + throw new Error('Document is already in collection, please use update()'); + } + + /* + * try adding object to collection + */ + try { + this.startTransaction(); + this.maxId++; + + if (isNaN(this.maxId)) { + this.maxId = (this.data[this.data.length - 1].$loki + 1); + } + + obj.$loki = this.maxId; + + if (!this.disableMeta) { + obj.meta.version = 0; + } + + var key, constrUnique = this.constraints.unique; + for (key in constrUnique) { + if (hasOwnProperty.call(constrUnique, key)) { + constrUnique[key].set(obj); + } + } + + // add new obj id to idIndex + this.idIndex.push(obj.$loki); + + // add the object + this.data.push(obj); + + var addedPos = this.data.length - 1; + + // now that we can efficiently determine the data[] position of newly added document, + // submit it for all registered DynamicViews to evaluate for inclusion/exclusion + var dvlen = this.DynamicViews.length; + for (var i = 0; i < dvlen; i++) { + this.DynamicViews[i].evaluateDocument(addedPos, true); + } + + if (this.adaptiveBinaryIndices) { + // for each binary index defined in collection, immediately update rather than flag for lazy rebuild + var bIndices = this.binaryIndices; + for (key in bIndices) { + this.adaptiveBinaryIndexInsert(addedPos, key); + } + } + else { + this.flagBinaryIndexesDirty(); + } + + this.commit(); + this.dirty = true; // for autosave scenarios + + return (this.cloneObjects) ? (clone(obj, this.cloneMethod)) : (obj); + } catch (err) { + this.rollback(); + this.console.error(err.message); + this.emit('error', err); + throw (err); // re-throw error so user does not think it succeeded + } + }; + + /** + * Applies a filter function and passes all results to an update function. + * + * @param {function} filterFunction - filter function whose results will execute update + * @param {function} updateFunction - update function to run against filtered documents + * @memberof Collection + */ + Collection.prototype.updateWhere = function(filterFunction, updateFunction) { + var results = this.where(filterFunction), + i = 0, + obj; + try { + for (i; i < results.length; i++) { + obj = updateFunction(results[i]); + this.update(obj); + } + + } catch (err) { + this.rollback(); + this.console.error(err.message); + } + }; + + /** + * Remove all documents matching supplied filter function. + * For 'mongo-like' querying you should migrate to [findAndRemove()]{@link Collection#findAndRemove}. + * @param {function|object} query - query object to filter on + * @memberof Collection + */ + Collection.prototype.removeWhere = function (query) { + var list; + if (typeof query === 'function') { + list = this.data.filter(query); + this.remove(list); + } else { + this.chain().find(query).remove(); + } + }; + + Collection.prototype.removeDataOnly = function () { + this.remove(this.data.slice()); + }; + + /** + * Internal method to remove a batch of documents from the collection. + * @param {number[]} positions - data/idIndex positions to remove + */ + Collection.prototype.removeBatchByPositions = function(positions) { + var len = positions.length; + var xo = {}; + var dlen, didx, idx; + var bic=Object.keys(this.binaryIndices).length; + var uic=Object.keys(this.constraints.unique).length; + var adaptiveOverride = this.adaptiveBinaryIndices && Object.keys(this.binaryIndices).length > 0; + var doc, self=this; + + try { + this.startTransaction(); + + // create hashobject for positional removal inclusion tests... 
+ // all keys defined in this hashobject represent $loki ids of the documents to remove. + for(idx=0; idx < len; idx++) { + xo[this.idIndex[positions[idx]]] = true; + } + + // if we will need to notify dynamic views and/or binary indices to update themselves... + dlen = this.DynamicViews.length; + if ((dlen > 0) || (bic > 0) || (uic > 0)) { + if (dlen > 0) { + // notify dynamic views to remove relevant documents at data positions + for (didx = 0; didx < dlen; didx++) { + // notify dv of remove (passing batch/array of positions) + this.DynamicViews[didx].removeDocument(positions); + } + } + + // notify binary indices to update + if (this.adaptiveBinaryIndices && !adaptiveOverride) { + // for each binary index defined in collection, immediately update rather than flag for lazy rebuild + var key, bIndices = this.binaryIndices; + + for (key in bIndices) { + this.adaptiveBinaryIndexRemove(positions, key); + } + } + else { + this.flagBinaryIndexesDirty(); + } + + if (uic) { + Object.keys(this.constraints.unique).forEach(function (key) { + for(idx=0; idx < len; idx++) { + doc = self.data[positions[idx]]; + if (doc[key] !== null && doc[key] !== undefined) { + self.constraints.unique[key].remove(doc[key]); + } + } + }); + } + } + + // emit 'delete' events only of listeners are attached. + // since data not removed yet, in future we can emit single delete event with array... + // for now that might be breaking change to put in potential 1.6 or LokiDB (lokijs2) version + if (!this.disableChangesApi || this.events.delete.length > 1) { + for(idx=0; idx < len; idx++) { + this.emit('delete', this.data[positions[idx]]); + } + } + + // remove from data[] : + // filter collection data for items not in inclusion hashobject + this.data = this.data.filter(function(obj) { + return !xo[obj.$loki]; + }); + + // remove from idIndex[] : + // filter idIndex for items not in inclusion hashobject + this.idIndex = this.idIndex.filter(function(id) { + return !xo[id]; + }); + + if (this.adaptiveBinaryIndices && adaptiveOverride) { + this.adaptiveBinaryIndices = false; + this.ensureAllIndexes(true); + this.adaptiveBinaryIndices = true; + } + + this.commit(); + + // flag collection as dirty for autosave + this.dirty = true; + } + catch (err) { + this.rollback(); + if (adaptiveOverride) { + this.adaptiveBinaryIndices = true; + } + this.console.error(err.message); + this.emit('error', err); + return null; + } + }; + + /** + * Internal method called by remove() + * @param {object[]|number[]} batch - array of documents or $loki ids to remove + */ + Collection.prototype.removeBatch = function(batch) { + var len = batch.length, + dlen=this.data.length, + idx; + var xlt = {}; + var posx = []; + + // create lookup hashobject to translate $loki id to position + for (idx=0; idx < dlen; idx++) { + xlt[this.data[idx].$loki] = idx; + } + + // iterate the batch + for (idx=0; idx < len; idx++) { + if (typeof(batch[idx]) === 'object') { + posx.push(xlt[batch[idx].$loki]); + } + else { + posx.push(xlt[batch[idx]]); + } + } + + this.removeBatchByPositions(posx); + }; + + /** + * Remove a document from the collection + * @param {object} doc - document to remove from collection + * @memberof Collection + */ + Collection.prototype.remove = function (doc) { + if (typeof doc === 'number') { + doc = this.get(doc); + } + + if ('object' !== typeof doc) { + throw new Error('Parameter is not an object'); + } + if (Array.isArray(doc)) { + this.removeBatch(doc); + return; + } + + if (!hasOwnProperty.call(doc, '$loki')) { + throw new Error('Object is 
not a document stored in the collection'); + } + + try { + this.startTransaction(); + var arr = this.get(doc.$loki, true), + // obj = arr[0], + position = arr[1]; + var self = this; + Object.keys(this.constraints.unique).forEach(function (key) { + if (doc[key] !== null && typeof doc[key] !== 'undefined') { + self.constraints.unique[key].remove(doc[key]); + } + }); + // now that we can efficiently determine the data[] position of newly added document, + // submit it for all registered DynamicViews to remove + for (var idx = 0; idx < this.DynamicViews.length; idx++) { + this.DynamicViews[idx].removeDocument(position); + } + + if (this.adaptiveBinaryIndices) { + // for each binary index defined in collection, immediately update rather than flag for lazy rebuild + var key, bIndices = this.binaryIndices; + for (key in bIndices) { + this.adaptiveBinaryIndexRemove(position, key); + } + } + else { + this.flagBinaryIndexesDirty(); + } + + this.data.splice(position, 1); + this.removeAutoUpdateObserver(doc); + + // remove id from idIndex + this.idIndex.splice(position, 1); + + this.commit(); + this.dirty = true; // for autosave scenarios + this.emit('delete', arr[0]); + delete doc.$loki; + delete doc.meta; + return doc; + + } catch (err) { + this.rollback(); + this.console.error(err.message); + this.emit('error', err); + return null; + } + }; + + /*---------------------+ + | Finding methods | + +----------------------*/ + + /** + * Get by Id - faster than other methods because of the searching algorithm + * @param {int} id - $loki id of document you want to retrieve + * @param {boolean} returnPosition - if 'true' we will return [object, position] + * @returns {(object|array|null)} Object reference if document was found, null if not, + * or an array if 'returnPosition' was passed. + * @memberof Collection + */ + Collection.prototype.get = function (id, returnPosition) { + var retpos = returnPosition || false, + data = this.idIndex, + max = data.length - 1, + min = 0, + mid = (min + max) >> 1; + + id = typeof id === 'number' ? id : parseInt(id, 10); + + if (isNaN(id)) { + throw new TypeError('Passed id is not an integer'); + } + + while (data[min] < data[max]) { + mid = (min + max) >> 1; + + if (data[mid] < id) { + min = mid + 1; + } else { + max = mid; + } + } + + if (max === min && data[min] === id) { + if (retpos) { + return [this.data[min], min]; + } + return this.data[min]; + } + return null; + + }; + + /** + * Perform binary range lookup for the data[dataPosition][binaryIndexName] property value + * Since multiple documents may contain the same value (which the index is sorted on), + * we hone in on range and then linear scan range to find exact index array position. + * @param {int} dataPosition : coll.data array index/position + * @param {string} binaryIndexName : index to search for dataPosition in + */ + Collection.prototype.getBinaryIndexPosition = function(dataPosition, binaryIndexName) { + var val = this.data[dataPosition][binaryIndexName]; + var index = this.binaryIndices[binaryIndexName].values; + + // i think calculateRange can probably be moved to collection + // as it doesn't seem to need resultset. 
need to verify
+    var range = this.calculateRange("$eq", binaryIndexName, val);
+
+    if (range[0] === 0 && range[1] === -1) {
+      // uhoh didn't find range
+      return null;
+    }
+
+    var min = range[0];
+    var max = range[1];
+
+    // narrow down the sub-segment of index values
+    // where the indexed property value exactly matches our
+    // value and then linear scan to find exact -index- position
+    for (var idx = min; idx <= max; idx++) {
+      if (index[idx] === dataPosition) return idx;
+    }
+
+    // uhoh
+    return null;
+  };
+
+  /**
+   * Adaptively insert a selected item to the index.
+   * @param {int} dataPosition : coll.data array index/position
+   * @param {string} binaryIndexName : index to search for dataPosition in
+   */
+  Collection.prototype.adaptiveBinaryIndexInsert = function(dataPosition, binaryIndexName) {
+    var index = this.binaryIndices[binaryIndexName].values;
+    var val = this.data[dataPosition][binaryIndexName];
+
+    // If you are inserting a javascript Date value into a binary index, convert to epoch time
+    if (this.serializableIndices === true && val instanceof Date) {
+      this.data[dataPosition][binaryIndexName] = val.getTime();
+      val = this.data[dataPosition][binaryIndexName];
+    }
+
+    var idxPos = (index.length === 0) ? 0 : this.calculateRangeStart(binaryIndexName, val, true);
+
+    // insert new data index into our binary index at the proper sorted location for relevant property calculated by idxPos.
+    // doing this after adjusting dataPositions so no clash with previous item at that position.
+    this.binaryIndices[binaryIndexName].values.splice(idxPos, 0, dataPosition);
+  };
+
+  /**
+   * Adaptively update a selected item within an index.
+   * @param {int} dataPosition : coll.data array index/position
+   * @param {string} binaryIndexName : index to search for dataPosition in
+   */
+  Collection.prototype.adaptiveBinaryIndexUpdate = function(dataPosition, binaryIndexName) {
+    // linear scan needed to find old position within index unless we optimize for clone scenarios later
+    // within (my) node 5.6.0, the following for() loop with strict compare is -much- faster than indexOf()
+    var idxPos,
+      index = this.binaryIndices[binaryIndexName].values,
+      len = index.length;
+
+    for (idxPos = 0; idxPos < len; idxPos++) {
+      if (index[idxPos] === dataPosition) break;
+    }
+
+    //var idxPos = this.binaryIndices[binaryIndexName].values.indexOf(dataPosition);
+    this.binaryIndices[binaryIndexName].values.splice(idxPos, 1);
+
+    //this.adaptiveBinaryIndexRemove(dataPosition, binaryIndexName, true);
+    this.adaptiveBinaryIndexInsert(dataPosition, binaryIndexName);
+  };
+
+  /**
+   * Adaptively remove a selected item from the index.
+   * @param {number|number[]} dataPosition : coll.data array index/position
+   * @param {string} binaryIndexName : index to search for dataPosition in
+   */
+  Collection.prototype.adaptiveBinaryIndexRemove = function(dataPosition, binaryIndexName, removedFromIndexOnly) {
+    var bi = this.binaryIndices[binaryIndexName];
+    var len, idx, rmidx, rmlen, rxo = {};
+    var curr, shift, idxPos;
+
+    if (Array.isArray(dataPosition)) {
+      // when called from chained remove, and only one document in array,
+      // it will be faster to use old algorithm
+      rmlen = dataPosition.length;
+      if (rmlen === 1) {
+        dataPosition = dataPosition[0];
+      }
+      // we were passed an array (batch) of documents so use this 'batch optimized' algorithm
+      else {
+        // (this span was truncated in the diff text and has been reconstructed from the surrounding logic)
+        for (rmidx = 0; rmidx < rmlen; rmidx++) {
+          rxo[dataPosition[rmidx]] = true;
+        }
+
+        // remove document from index (with filter function)
+        bi.values = bi.values.filter(function (di) { return !rxo[di]; });
+
+        // if we passed this optional flag parameter, we are calling from adaptiveBinaryIndexUpdate,
+        // in which case data positions stay the same.
+        if (removedFromIndexOnly === true) {
+          return;
+        }
+
+        // sort the removed positions so we can count how far each remaining data position must shift down
+        var sortedPositions = dataPosition.slice();
+        sortedPositions.sort(function (a, b) { return a - b; });
+
+        // to remove holes, we need to 'shift down' the index's data array positions
+        // for every data position greater than the removed positions
+        len = bi.values.length;
+        for (idx = 0; idx < len; idx++) {
+          curr = bi.values[idx];
+          shift = 0;
+          for (rmidx = 0; rmidx < rmlen && curr > sortedPositions[rmidx]; rmidx++) {
+            shift++;
+          }
+          bi.values[idx] -= shift;
+        }
+
+        // batch processed, bail out
+        return;
+      }
+
+      // not a batch so continue...
+ } + + idxPos = this.getBinaryIndexPosition(dataPosition, binaryIndexName); + + if (idxPos === null) { + // throw new Error('unable to determine binary index position'); + return null; + } + + // remove document from index (with splice) + bi.values.splice(idxPos, 1); + + // if we passed this optional flag parameter, we are calling from adaptiveBinaryIndexUpdate, + // in which case data positions stay the same. + if (removedFromIndexOnly === true) { + return; + } + + // since index stores data array positions, if we remove a document + // we need to adjust array positions -1 for all document positions greater than removed position + len = bi.values.length; + for (idx = 0; idx < len; idx++) { + if (bi.values[idx] > dataPosition) { + bi.values[idx]--; + } + } + }; + + /** + * Internal method used for index maintenance and indexed searching. + * Calculates the beginning of an index range for a given value. + * For index maintainance (adaptive:true), we will return a valid index position to insert to. + * For querying (adaptive:false/undefined), we will : + * return lower bound/index of range of that value (if found) + * return next lower index position if not found (hole) + * If index is empty it is assumed to be handled at higher level, so + * this method assumes there is at least 1 document in index. + * + * @param {string} prop - name of property which has binary index + * @param {any} val - value to find within index + * @param {bool?} adaptive - if true, we will return insert position + */ + Collection.prototype.calculateRangeStart = function (prop, val, adaptive) { + var rcd = this.data; + var index = this.binaryIndices[prop].values; + var min = 0; + var max = index.length - 1; + var mid = 0; + + if (index.length === 0) { + return -1; + } + + var minVal = rcd[index[min]][prop]; + var maxVal = rcd[index[max]][prop]; + + // hone in on start position of value + while (min < max) { + mid = (min + max) >> 1; + + if (Comparators.lt(rcd[index[mid]][prop], val, false)) { + min = mid + 1; + } else { + max = mid; + } + } + + var lbound = min; + + // found it... return it + if (Comparators.aeq(val, rcd[index[lbound]][prop])) { + return lbound; + } + + // if not in index and our value is less than the found one + if (Comparators.lt(val, rcd[index[lbound]][prop], false)) { + return adaptive?lbound:lbound-1; + } + + // not in index and our value is greater than the found one + return adaptive?lbound+1:lbound; + }; + + /** + * Internal method used for indexed $between. Given a prop (index name), and a value + * (which may or may not yet exist) this will find the final position of that upper range value. 
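+   * Added illustrative note (sketch, not upstream docs): together with
+   * calculateRangeStart() this backs the indexed '$between' operator, e.g.
+   *   coll.chain().find({ age: { '$between': [25, 40] } }).data();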
+ */ + Collection.prototype.calculateRangeEnd = function (prop, val) { + var rcd = this.data; + var index = this.binaryIndices[prop].values; + var min = 0; + var max = index.length - 1; + var mid = 0; + + if (index.length === 0) { + return -1; + } + + var minVal = rcd[index[min]][prop]; + var maxVal = rcd[index[max]][prop]; + + // hone in on start position of value + while (min < max) { + mid = (min + max) >> 1; + + if (Comparators.lt(val, rcd[index[mid]][prop], false)) { + max = mid; + } else { + min = mid + 1; + } + } + + var ubound = max; + + // only eq if last element in array is our val + if (Comparators.aeq(val, rcd[index[ubound]][prop])) { + return ubound; + } + + // if not in index and our value is less than the found one + if (Comparators.gt(val, rcd[index[ubound]][prop], false)) { + return ubound+1; + } + + // either hole or first nonmatch + if (Comparators.aeq(val, rcd[index[ubound-1]][prop])) { + return ubound-1; + } + + // hole, so ubound if nearest gt than the val we were looking for + return ubound; + }; + + /** + * calculateRange() - Binary Search utility method to find range/segment of values matching criteria. + * this is used for collection.find() and first find filter of resultset/dynview + * slightly different than get() binary search in that get() hones in on 1 value, + * but we have to hone in on many (range) + * @param {string} op - operation, such as $eq + * @param {string} prop - name of property to calculate range for + * @param {object} val - value to use for range calculation. + * @returns {array} [start, end] index array positions + */ + Collection.prototype.calculateRange = function (op, prop, val) { + var rcd = this.data; + var index = this.binaryIndices[prop].values; + var min = 0; + var max = index.length - 1; + var mid = 0; + var lbound, lval; + var ubound, uval; + + // when no documents are in collection, return empty range condition + if (rcd.length === 0) { + return [0, -1]; + } + + var minVal = rcd[index[min]][prop]; + var maxVal = rcd[index[max]][prop]; + + // if value falls outside of our range return [0, -1] to designate no results + switch (op) { + case '$eq': + case '$aeq': + if (Comparators.lt(val, minVal, false) || Comparators.gt(val, maxVal, false)) { + return [0, -1]; + } + break; + case '$dteq': + if (Comparators.lt(val, minVal, false) || Comparators.gt(val, maxVal, false)) { + return [0, -1]; + } + break; + case '$gt': + // none are within range + if (Comparators.gt(val, maxVal, true)) { + return [0, -1]; + } + // all are within range + if (Comparators.gt(minVal, val, false)) { + return [min, max]; + } + break; + case '$gte': + // none are within range + if (Comparators.gt(val, maxVal, false)) { + return [0, -1]; + } + // all are within range + if (Comparators.gt(minVal, val, true)) { + return [min, max]; + } + break; + case '$lt': + // none are within range + if (Comparators.lt(val, minVal, true)) { + return [0, -1]; + } + // all are within range + if (Comparators.lt(maxVal, val, false)) { + return [min, max]; + } + break; + case '$lte': + // none are within range + if (Comparators.lt(val, minVal, false)) { + return [0, -1]; + } + // all are within range + if (Comparators.lt(maxVal, val, true)) { + return [min, max]; + } + break; + case '$between': + // none are within range (low range is greater) + if (Comparators.gt(val[0], maxVal, false)) { + return [0, -1]; + } + // none are within range (high range lower) + if (Comparators.lt(val[1], minVal, false)) { + return [0, -1]; + } + + lbound = this.calculateRangeStart(prop, val[0]); + ubound 
= this.calculateRangeEnd(prop, val[1]); + + if (lbound < 0) lbound++; + if (ubound > max) ubound--; + + if (!Comparators.gt(rcd[index[lbound]][prop], val[0], true)) lbound++; + if (!Comparators.lt(rcd[index[ubound]][prop], val[1], true)) ubound--; + + if (ubound < lbound) return [0, -1]; + + return ([lbound, ubound]); + case '$in': + var idxset = [], + segResult = []; + // query each value '$eq' operator and merge the seqment results. + for (var j = 0, len = val.length; j < len; j++) { + var seg = this.calculateRange('$eq', prop, val[j]); + + for (var i = seg[0]; i <= seg[1]; i++) { + if (idxset[i] === undefined) { + idxset[i] = true; + segResult.push(i); + } + } + } + return segResult; + } + + // determine lbound where needed + switch (op) { + case '$eq': + case '$aeq': + case '$dteq': + case '$gte': + case '$lt': + lbound = this.calculateRangeStart(prop, val); + lval = rcd[index[lbound]][prop]; + break; + default: break; + } + + // determine ubound where needed + switch (op) { + case '$eq': + case '$aeq': + case '$dteq': + case '$lte': + case '$gt': + ubound = this.calculateRangeEnd(prop, val); + uval = rcd[index[ubound]][prop]; + break; + default: break; + } + + + switch (op) { + case '$eq': + case '$aeq': + case '$dteq': + // if hole (not found) + if (!Comparators.aeq(lval, val)) { + return [0, -1]; + } + + return [lbound, ubound]; + + case '$gt': + // if hole (not found) ub position is already greater + if (!Comparators.aeq(rcd[index[ubound]][prop], val)) { + return [ubound, max]; + } + // otherwise (found) so ubound is still equal, get next + return [ubound+1, max]; + + case '$gte': + // if hole (not found) lb position marks left outside of range + if (!Comparators.aeq(rcd[index[lbound]][prop], val)) { + return [lbound+1, max]; + } + // otherwise (found) so lb is first position where its equal + return [lbound, max]; + + case '$lt': + // if hole (not found) position already is less than + if (!Comparators.aeq(rcd[index[lbound]][prop], val)) { + return [min, lbound]; + } + // otherwise (found) so lb marks left inside of eq range, get previous + return [min, lbound-1]; + + case '$lte': + // if hole (not found) ub position marks right outside so get previous + if (!Comparators.aeq(rcd[index[ubound]][prop], val)) { + return [min, ubound-1]; + } + // otherwise (found) so ub is last position where its still equal + return [min, ubound]; + + default: + return [0, rcd.length - 1]; + } + }; + + /** + * Retrieve doc by Unique index + * @param {string} field - name of uniquely indexed property to use when doing lookup + * @param {value} value - unique value to search for + * @returns {object} document matching the value passed + * @memberof Collection + */ + Collection.prototype.by = function (field, value) { + var self; + if (value === undefined) { + self = this; + return function (value) { + return self.by(field, value); + }; + } + + var result = this.constraints.unique[field].get(value); + if (!this.cloneObjects) { + return result; + } else { + return clone(result, this.cloneMethod); + } + }; + + /** + * Find one object by index property, by property equal to value + * @param {object} query - query object used to perform search with + * @returns {(object|null)} First matching document, or null if none + * @memberof Collection + */ + Collection.prototype.findOne = function (query) { + query = query || {}; + + // Instantiate Resultset and exec find op passing firstOnly = true param + var result = this.chain().find(query,true).data(); + + if (Array.isArray(result) && result.length === 0) { + 
return null; + } else { + if (!this.cloneObjects) { + return result[0]; + } else { + return clone(result[0], this.cloneMethod); + } + } + }; + + /** + * Chain method, used for beginning a series of chained find() and/or view() operations + * on a collection. + * + * @param {string|array=} transform - named transform or array of transform steps + * @param {object=} parameters - Object containing properties representing parameters to substitute + * @returns {Resultset} (this) resultset, or data array if any map or join functions where called + * @memberof Collection + */ + Collection.prototype.chain = function (transform, parameters) { + var rs = new Resultset(this); + + if (typeof transform === 'undefined') { + return rs; + } + + return rs.transform(transform, parameters); + }; + + /** + * Find method, api is similar to mongodb. + * for more complex queries use [chain()]{@link Collection#chain} or [where()]{@link Collection#where}. + * @example {@tutorial Query Examples} + * @param {object} query - 'mongo-like' query object + * @returns {array} Array of matching documents + * @memberof Collection + */ + Collection.prototype.find = function (query) { + return this.chain().find(query).data(); + }; + + /** + * Find object by unindexed field by property equal to value, + * simply iterates and returns the first element matching the query + */ + Collection.prototype.findOneUnindexed = function (prop, value) { + var i = this.data.length, + doc; + while (i--) { + if (this.data[i][prop] === value) { + doc = this.data[i]; + return doc; + } + } + return null; + }; + + /** + * Transaction methods + */ + + /** start the transation */ + Collection.prototype.startTransaction = function () { + if (this.transactional) { + this.cachedData = clone(this.data, this.cloneMethod); + this.cachedIndex = this.idIndex; + this.cachedBinaryIndex = this.binaryIndices; + + // propagate startTransaction to dynamic views + for (var idx = 0; idx < this.DynamicViews.length; idx++) { + this.DynamicViews[idx].startTransaction(); + } + } + }; + + /** commit the transation */ + Collection.prototype.commit = function () { + if (this.transactional) { + this.cachedData = null; + this.cachedIndex = null; + this.cachedBinaryIndex = null; + + // propagate commit to dynamic views + for (var idx = 0; idx < this.DynamicViews.length; idx++) { + this.DynamicViews[idx].commit(); + } + } + }; + + /** roll back the transation */ + Collection.prototype.rollback = function () { + if (this.transactional) { + if (this.cachedData !== null && this.cachedIndex !== null) { + this.data = this.cachedData; + this.idIndex = this.cachedIndex; + this.binaryIndices = this.cachedBinaryIndex; + } + + // propagate rollback to dynamic views + for (var idx = 0; idx < this.DynamicViews.length; idx++) { + this.DynamicViews[idx].rollback(); + } + } + }; + + // async executor. This is only to enable callbacks at the end of the execution. + Collection.prototype.async = function (fun, callback) { + setTimeout(function () { + if (typeof fun === 'function') { + fun(); + callback(); + } else { + throw new TypeError('Argument passed for async execution is not a function'); + } + }, 0); + }; + + /** + * Query the collection by supplying a javascript filter function. 
+ * @example + * var results = coll.where(function(obj) { + * return obj.legs === 8; + * }); + * + * @param {function} fun - filter function to run against all collection docs + * @returns {array} all documents which pass your filter function + * @memberof Collection + */ + Collection.prototype.where = function (fun) { + return this.chain().where(fun).data(); + }; + + /** + * Map Reduce operation + * + * @param {function} mapFunction - function to use as map function + * @param {function} reduceFunction - function to use as reduce function + * @returns {data} The result of your mapReduce operation + * @memberof Collection + */ + Collection.prototype.mapReduce = function (mapFunction, reduceFunction) { + try { + return reduceFunction(this.data.map(mapFunction)); + } catch (err) { + throw err; + } + }; + + /** + * Join two collections on specified properties + * + * @param {array|Resultset|Collection} joinData - array of documents to 'join' to this collection + * @param {string} leftJoinProp - property name in collection + * @param {string} rightJoinProp - property name in joinData + * @param {function=} mapFun - (Optional) map function to use + * @param {object=} dataOptions - options to data() before input to your map function + * @param {bool} dataOptions.removeMeta - allows removing meta before calling mapFun + * @param {boolean} dataOptions.forceClones - forcing the return of cloned objects to your map object + * @param {string} dataOptions.forceCloneMethod - Allows overriding the default or collection specified cloning method. + * @returns {Resultset} Result of the mapping operation + * @memberof Collection + */ + Collection.prototype.eqJoin = function (joinData, leftJoinProp, rightJoinProp, mapFun, dataOptions) { + // logic in Resultset class + return new Resultset(this).eqJoin(joinData, leftJoinProp, rightJoinProp, mapFun, dataOptions); + }; + + /* ------ STAGING API -------- */ + /** + * stages: a map of uniquely identified 'stages', which hold copies of objects to be + * manipulated without affecting the data in the original collection + */ + Collection.prototype.stages = {}; + + /** + * (Staging API) create a stage and/or retrieve it + * @memberof Collection + */ + Collection.prototype.getStage = function (name) { + if (!this.stages[name]) { + this.stages[name] = {}; + } + return this.stages[name]; + }; + /** + * a collection of objects recording the changes applied through a commmitStage + */ + Collection.prototype.commitLog = []; + + /** + * (Staging API) create a copy of an object and insert it into a stage + * @memberof Collection + */ + Collection.prototype.stage = function (stageName, obj) { + var copy = JSON.parse(JSON.stringify(obj)); + this.getStage(stageName)[obj.$loki] = copy; + return copy; + }; + + /** + * (Staging API) re-attach all objects to the original collection, so indexes and views can be rebuilt + * then create a message to be inserted in the commitlog + * @param {string} stageName - name of stage + * @param {string} message + * @memberof Collection + */ + Collection.prototype.commitStage = function (stageName, message) { + var stage = this.getStage(stageName), + prop, + timestamp = new Date().getTime(); + + for (prop in stage) { + + this.update(stage[prop]); + this.commitLog.push({ + timestamp: timestamp, + message: message, + data: JSON.parse(JSON.stringify(stage[prop])) + }); + } + this.stages[stageName] = {}; + }; + + Collection.prototype.no_op = function () { + return; + }; + + /** + * @memberof Collection + */ + Collection.prototype.extract = function 
(field) { + var i = 0, + len = this.data.length, + isDotNotation = isDeepProperty(field), + result = []; + for (i; i < len; i += 1) { + result.push(deepProperty(this.data[i], field, isDotNotation)); + } + return result; + }; + + /** + * @memberof Collection + */ + Collection.prototype.max = function (field) { + return Math.max.apply(null, this.extract(field)); + }; + + /** + * @memberof Collection + */ + Collection.prototype.min = function (field) { + return Math.min.apply(null, this.extract(field)); + }; + + /** + * @memberof Collection + */ + Collection.prototype.maxRecord = function (field) { + var i = 0, + len = this.data.length, + deep = isDeepProperty(field), + result = { + index: 0, + value: undefined + }, + max; + + for (i; i < len; i += 1) { + if (max !== undefined) { + if (max < deepProperty(this.data[i], field, deep)) { + max = deepProperty(this.data[i], field, deep); + result.index = this.data[i].$loki; + } + } else { + max = deepProperty(this.data[i], field, deep); + result.index = this.data[i].$loki; + } + } + result.value = max; + return result; + }; + + /** + * @memberof Collection + */ + Collection.prototype.minRecord = function (field) { + var i = 0, + len = this.data.length, + deep = isDeepProperty(field), + result = { + index: 0, + value: undefined + }, + min; + + for (i; i < len; i += 1) { + if (min !== undefined) { + if (min > deepProperty(this.data[i], field, deep)) { + min = deepProperty(this.data[i], field, deep); + result.index = this.data[i].$loki; + } + } else { + min = deepProperty(this.data[i], field, deep); + result.index = this.data[i].$loki; + } + } + result.value = min; + return result; + }; + + /** + * @memberof Collection + */ + Collection.prototype.extractNumerical = function (field) { + return this.extract(field).map(parseBase10).filter(Number).filter(function (n) { + return !(isNaN(n)); + }); + }; + + /** + * Calculates the average numerical value of a property + * + * @param {string} field - name of property in docs to average + * @returns {number} average of property in all docs in the collection + * @memberof Collection + */ + Collection.prototype.avg = function (field) { + return average(this.extractNumerical(field)); + }; + + /** + * Calculate standard deviation of a field + * @memberof Collection + * @param {string} field + */ + Collection.prototype.stdDev = function (field) { + return standardDeviation(this.extractNumerical(field)); + }; + + /** + * @memberof Collection + * @param {string} field + */ + Collection.prototype.mode = function (field) { + var dict = {}, + data = this.extract(field); + data.forEach(function (obj) { + if (dict[obj]) { + dict[obj] += 1; + } else { + dict[obj] = 1; + } + }); + var max, + prop, mode; + for (prop in dict) { + if (max) { + if (max < dict[prop]) { + mode = prop; + } + } else { + mode = prop; + max = dict[prop]; + } + } + return mode; + }; + + /** + * @memberof Collection + * @param {string} field - property name + */ + Collection.prototype.median = function (field) { + var values = this.extractNumerical(field); + values.sort(sub); + + var half = Math.floor(values.length / 2); + + if (values.length % 2) { + return values[half]; + } else { + return (values[half - 1] + values[half]) / 2.0; + } + }; + + /** + * General utils, including statistical functions + */ + function isDeepProperty(field) { + return field.indexOf('.') !== -1; + } + + function parseBase10(num) { + return parseFloat(num, 10); + } + + function isNotUndefined(obj) { + return obj !== undefined; + } + + function add(a, b) { + return a + b; + } 
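// ---------------------------------------------------------------------------
// Editor's note - illustrative sketch, not part of the patch. It shows typical
// usage of the legacy Collection query and aggregation API documented above.
// The require path, collection name and fields ("users", "name", "age") are
// hypothetical placeholders.
// ---------------------------------------------------------------------------
var loki = require("./lokijs.js");
var db = new loki("example.db");
var users = db.addCollection("users", { unique: ["name"], indices: ["age"] });

users.insert({ name: "odin", age: 50 });
users.insert({ name: "thor", age: 35 });

// mongo-like querying
users.find({ age: { $gte: 40 } });                   // all docs with age >= 40
users.findOne({ name: "thor" });                     // first match, or null
users.where(function (u) { return u.age > 40; });    // unindexed filter function

// unique-index lookup, direct or curried
users.by("name", "odin");
var byName = users.by("name");                       // returns a lookup function
byName("thor");

// aggregation helpers built on extract()/extractNumerical()
users.avg("age");                                    // 42.5
users.maxRecord("age");                              // { index: <$loki id>, value: 50 }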
+ + function sub(a, b) { + return a - b; + } + + function median(values) { + values.sort(sub); + var half = Math.floor(values.length / 2); + return (values.length % 2) ? values[half] : ((values[half - 1] + values[half]) / 2.0); + } + + function average(array) { + return (array.reduce(add, 0)) / array.length; + } + + function standardDeviation(values) { + var avg = average(values); + var squareDiffs = values.map(function (value) { + var diff = value - avg; + var sqrDiff = diff * diff; + return sqrDiff; + }); + + var avgSquareDiff = average(squareDiffs); + + var stdDev = Math.sqrt(avgSquareDiff); + return stdDev; + } + + function deepProperty(obj, property, isDeep) { + if (isDeep === false) { + // pass without processing + return obj[property]; + } + var pieces = property.split('.'), + root = obj; + while (pieces.length > 0) { + root = root[pieces.shift()]; + } + return root; + } + + function binarySearch(array, item, fun) { + var lo = 0, + hi = array.length, + compared, + mid; + while (lo < hi) { + mid = (lo + hi) >> 1; + compared = fun.apply(null, [item, array[mid]]); + if (compared === 0) { + return { + found: true, + index: mid + }; + } else if (compared < 0) { + hi = mid; + } else { + lo = mid + 1; + } + } + return { + found: false, + index: hi + }; + } + + function BSonSort(fun) { + return function (array, item) { + return binarySearch(array, item, fun); + }; + } + + function KeyValueStore() {} + + KeyValueStore.prototype = { + keys: [], + values: [], + sort: function (a, b) { + return (a < b) ? -1 : ((a > b) ? 1 : 0); + }, + setSort: function (fun) { + this.bs = new BSonSort(fun); + }, + bs: function () { + return new BSonSort(this.sort); + }, + set: function (key, value) { + var pos = this.bs(this.keys, key); + if (pos.found) { + this.values[pos.index] = value; + } else { + this.keys.splice(pos.index, 0, key); + this.values.splice(pos.index, 0, value); + } + }, + get: function (key) { + return this.values[binarySearch(this.keys, key, this.sort).index]; + } + }; + + function UniqueIndex(uniqueField) { + this.field = uniqueField; + this.keyMap = {}; + this.lokiMap = {}; + } + UniqueIndex.prototype.keyMap = {}; + UniqueIndex.prototype.lokiMap = {}; + UniqueIndex.prototype.set = function (obj) { + var fieldValue = obj[this.field]; + if (fieldValue !== null && typeof (fieldValue) !== 'undefined') { + if (this.keyMap[fieldValue]) { + throw new Error('Duplicate key for property ' + this.field + ': ' + fieldValue); + } else { + this.keyMap[fieldValue] = obj; + this.lokiMap[obj.$loki] = fieldValue; + } + } + }; + UniqueIndex.prototype.get = function (key) { + return this.keyMap[key]; + }; + + UniqueIndex.prototype.byId = function (id) { + return this.keyMap[this.lokiMap[id]]; + }; + /** + * Updates a document's unique index given an updated object. 
+ * @param {Object} obj Original document object + * @param {Object} doc New document object (likely the same as obj) + */ + UniqueIndex.prototype.update = function (obj, doc) { + if (this.lokiMap[obj.$loki] !== doc[this.field]) { + var old = this.lokiMap[obj.$loki]; + this.set(doc); + // make the old key fail bool test, while avoiding the use of delete (mem-leak prone) + this.keyMap[old] = undefined; + } else { + this.keyMap[obj[this.field]] = doc; + } + }; + UniqueIndex.prototype.remove = function (key) { + var obj = this.keyMap[key]; + if (obj !== null && typeof obj !== 'undefined') { + this.keyMap[key] = undefined; + this.lokiMap[obj.$loki] = undefined; + } else { + throw new Error('Key is not in unique index: ' + this.field); + } + }; + UniqueIndex.prototype.clear = function () { + this.keyMap = {}; + this.lokiMap = {}; + }; + + function ExactIndex(exactField) { + this.index = {}; + this.field = exactField; + } + + // add the value you want returned to the key in the index + ExactIndex.prototype = { + set: function add(key, val) { + if (this.index[key]) { + this.index[key].push(val); + } else { + this.index[key] = [val]; + } + }, + + // remove the value from the index, if the value was the last one, remove the key + remove: function remove(key, val) { + var idxSet = this.index[key]; + for (var i in idxSet) { + if (idxSet[i] == val) { + idxSet.splice(i, 1); + } + } + if (idxSet.length < 1) { + this.index[key] = undefined; + } + }, + + // get the values related to the key, could be more than one + get: function get(key) { + return this.index[key]; + }, + + // clear will zap the index + clear: function clear(key) { + this.index = {}; + } + }; + + function SortedIndex(sortedField) { + this.field = sortedField; + } + + SortedIndex.prototype = { + keys: [], + values: [], + // set the default sort + sort: function (a, b) { + return (a < b) ? -1 : ((a > b) ? 
1 : 0); + }, + bs: function () { + return new BSonSort(this.sort); + }, + // and allow override of the default sort + setSort: function (fun) { + this.bs = new BSonSort(fun); + }, + // add the value you want returned to the key in the index + set: function (key, value) { + var pos = binarySearch(this.keys, key, this.sort); + if (pos.found) { + this.values[pos.index].push(value); + } else { + this.keys.splice(pos.index, 0, key); + this.values.splice(pos.index, 0, [value]); + } + }, + // get all values which have a key == the given key + get: function (key) { + var bsr = binarySearch(this.keys, key, this.sort); + if (bsr.found) { + return this.values[bsr.index]; + } else { + return []; + } + }, + // get all values which have a key < the given key + getLt: function (key) { + var bsr = binarySearch(this.keys, key, this.sort); + var pos = bsr.index; + if (bsr.found) pos--; + return this.getAll(key, 0, pos); + }, + // get all values which have a key > the given key + getGt: function (key) { + var bsr = binarySearch(this.keys, key, this.sort); + var pos = bsr.index; + if (bsr.found) pos++; + return this.getAll(key, pos, this.keys.length); + }, + + // get all vals from start to end + getAll: function (key, start, end) { + var results = []; + for (var i = start; i < end; i++) { + results = results.concat(this.values[i]); + } + return results; + }, + // just in case someone wants to do something smart with ranges + getPos: function (key) { + return binarySearch(this.keys, key, this.sort); + }, + // remove the value from the index, if the value was the last one, remove the key + remove: function (key, value) { + var pos = binarySearch(this.keys, key, this.sort).index; + var idxSet = this.values[pos]; + for (var i in idxSet) { + if (idxSet[i] == value) idxSet.splice(i, 1); + } + if (idxSet.length < 1) { + this.keys.splice(pos, 1); + this.values.splice(pos, 1); + } + }, + // clear will zap the index + clear: function () { + this.keys = []; + this.values = []; + } + }; + + + Loki.LokiOps = LokiOps; + Loki.Collection = Collection; + Loki.KeyValueStore = KeyValueStore; + Loki.LokiMemoryAdapter = LokiMemoryAdapter; + Loki.LokiPartitioningAdapter = LokiPartitioningAdapter; + Loki.LokiLocalStorageAdapter = LokiLocalStorageAdapter; + Loki.LokiFsAdapter = LokiFsAdapter; + Loki.persistenceAdapters = { + fs: LokiFsAdapter, + localStorage: LokiLocalStorageAdapter + }; + Loki.aeq = aeqHelper; + Loki.lt = ltHelper; + Loki.gt = gtHelper; + Loki.Comparators = Comparators; + return Loki; + }()); + +})); diff --git a/packages/partitioning-adapter/spec/generic/partitioning.spec.ts b/packages/partitioning-adapter/spec/generic/partitioning.spec.ts index 5b9333dd..f43bca12 100644 --- a/packages/partitioning-adapter/spec/generic/partitioning.spec.ts +++ b/packages/partitioning-adapter/spec/generic/partitioning.spec.ts @@ -3,13 +3,14 @@ import { Loki } from "../../../loki/src/loki"; import { MemoryStorage } from "../../../memory-storage/src/memory_storage"; import { PartitioningAdapter } from "../../src/partitioning_adapter"; +declare var require: (moduleId: string) => any; +const loki = require("../../../lokijs/lokijs.js"); interface AB { a: number; b: number; } - interface User { name: string; owner: string; @@ -262,8 +263,30 @@ describe("partitioning adapter", () => { }); }); }); + }).catch((e) => { + done.fail(e); }); }); }); }); + + it("from lokijs", (done) => { + const legacyMemAdapter = new loki.LokiMemoryAdapter(); + const legacyDB = new loki("legacyDB", {adapter: new loki.LokiPartitioningAdapter(legacyMemAdapter)}); 
+ const coll = legacyDB.addCollection("myColl"); + coll.insert({name: "Hello World"}); + legacyDB.saveDatabase(() => { + // Load with LokiDB. + const memStorage = new MemoryStorage(); + memStorage.hashStore = legacyMemAdapter.hashStore; + const db = new Loki("legacyDB"); + return db.initializePersistence( {adapter: new PartitioningAdapter(memStorage)}) + .then(() => { + return db.loadDatabase(); + }).then(() => { + expect(db.getCollection<{name: string}>("myColl").find()[0].name).toEqual("Hello World"); + done(); + }); + }); + }); }); diff --git a/packages/partitioning-adapter/src/partitioning_adapter.ts b/packages/partitioning-adapter/src/partitioning_adapter.ts index 13afdb77..13c40611 100644 --- a/packages/partitioning-adapter/src/partitioning_adapter.ts +++ b/packages/partitioning-adapter/src/partitioning_adapter.ts @@ -5,9 +5,9 @@ import { PLUGINS } from "../../common/plugin"; /** * An adapter for adapters. Converts a non reference mode adapter into a reference mode adapter * which can perform destructuring and partitioning. Each collection will be stored in its own key/save and - * only dirty collections will be saved. If you turn on paging with default page size of 25megs and save + * only dirty collections will be saved. If you turn on paging with default page size of 25megs and save * a 75 meg collection it should use up roughly 3 save slots (key/value pairs sent to inner adapter). - * A dirty collection that spans three pages will save all three pages again + * A dirty collection that spans three pages will save all three pages again. * Paging mode was added mainly because Chrome has issues saving 'too large' of a string within a * single IndexedDB row. If a single document update causes the collection to be flagged as dirty, all * of that collection's pages will be written on next save. 
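// ---------------------------------------------------------------------------
// Editor's note - illustrative sketch, not part of the patch. It wires up the
// PartitioningAdapter described in the doc comment above the way the new
// partitioning spec in this diff does. The require paths and the "app.db"
// name are assumptions.
// ---------------------------------------------------------------------------
const { Loki } = require("../../loki/src/loki");
const { MemoryStorage } = require("../../memory-storage/src/memory_storage");
const { PartitioningAdapter } = require("./partitioning_adapter");

const db = new Loki("app.db");
db.addCollection("users");
db.addCollection("orders");

// Wrap a non reference mode adapter (here MemoryStorage); each collection is
// then persisted under its own key in the inner adapter, with extra page keys
// when paging is enabled, and only dirty partitions are re-written on save.
const adapter = new PartitioningAdapter(new MemoryStorage());
db.initializePersistence({ adapter })
  .then(() => db.saveDatabase())
  .then(() => db.loadDatabase());   // container first, then one partition at a time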
@@ -74,20 +74,15 @@ export class PartitioningAdapter implements StorageAdapter { * @param {string} dbname - name of the database (filename/keyname) * @returns {Promise} a Promise that resolves after the database was loaded */ - public loadDatabase(dbname: string): Promise { + public loadDatabase(dbname: string): Promise { this._dbname = dbname; this._dbref = new Loki(dbname); // load the db container (without data) return this._adapter.loadDatabase(dbname).then((result: string) => { - if (typeof result !== "string") { - throw new Error("LokiPartitioningAdapter received an unexpected response from inner adapter loadDatabase()"); - } - // I will want to use loki destructuring helper methods so i will inflate into typed instance - let db = JSON.parse(result); - this._dbref.loadJSONObject(db); - db = null; + this._dbref.loadJSON(result); + result = null; // free up memory if (this._dbref._collections.length === 0) { return this._dbref; @@ -98,7 +93,8 @@ export class PartitioningAdapter implements StorageAdapter { pageIndex: 0 }; - return this._loadNextPartition(0).then(() => this._dbref); + return this._loadNextPartition(0) + .then(() => this._dbref); }); } @@ -119,6 +115,7 @@ export class PartitioningAdapter implements StorageAdapter { this._dbref._collections[partition]._data = this._dbref.deserializeCollection(result, { delimited: true }); + result = null; // free up memory if (++partition < this._dbref._collections.length) { return this._loadNextPartition(partition); @@ -139,7 +136,7 @@ export class PartitioningAdapter implements StorageAdapter { // load whatever page is next in sequence return this._adapter.loadDatabase(keyname).then((result: string) => { let data = result.split(this._delimiter); - result = ""; // free up memory now that we have split it into array + result = null; // free up memory let dlen = data.length; // detect if last page by presence of final empty string element and remove it if so @@ -159,7 +156,7 @@ export class PartitioningAdapter implements StorageAdapter { this._dbref._collections[this._pageIterator.collection]._data.push(JSON.parse(data[idx])); data[idx] = null; } - data = []; + data = null; // free up memory // if last page, we are done with this partition if (isLastPage) { @@ -195,7 +192,6 @@ export class PartitioningAdapter implements StorageAdapter { this._dirtyPartitions.push(idx); } } - return this._saveNextPartition(); } @@ -226,13 +222,15 @@ export class PartitioningAdapter implements StorageAdapter { } // otherwise this is 'non-paged' partioning... - const result = this._dbref.serializeDestructured({ + let result = this._dbref.serializeDestructured({ partitioned: true, delimited: true, partition }); return this._adapter.saveDatabase(keyname, result as string).then(() => { + result = null; // free up memory + if (this._dirtyPartitions.length !== 0) { return this._saveNextPartition(); }
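// ---------------------------------------------------------------------------
// Editor's note - illustrative sketch, not part of the patch. It summarizes the
// end-to-end migration exercised by the new "from lokijs" specs: a database
// written by legacy lokijs through LokiPartitioningAdapter is re-opened with
// this PartitioningAdapter against the same backing store. Require paths and
// names are placeholders.
// ---------------------------------------------------------------------------
const { Loki } = require("../../loki/src/loki");
const { MemoryStorage } = require("../../memory-storage/src/memory_storage");
const { PartitioningAdapter } = require("./partitioning_adapter");
const loki = require("../../lokijs/lokijs.js");

const legacyMem = new loki.LokiMemoryAdapter();
const legacyDB = new loki("legacyDB", { adapter: new loki.LokiPartitioningAdapter(legacyMem) });
legacyDB.addCollection("myColl").insert({ name: "Hello World" });

legacyDB.saveDatabase(() => {
  // Reuse the hash store the legacy memory adapter just wrote.
  const storage = new MemoryStorage();
  storage.hashStore = legacyMem.hashStore;

  const db = new Loki("legacyDB");
  db.initializePersistence({ adapter: new PartitioningAdapter(storage) })
    .then(() => db.loadDatabase())
    .then(() => {
      // Legacy data is now readable through the new API.
      console.log(db.getCollection("myColl").find()[0].name); // "Hello World"
    });
});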