diff --git a/packages/radfish/Application.spec.js b/packages/radfish/Application.spec.js index 089f80fd..3d1ad100 100644 --- a/packages/radfish/Application.spec.js +++ b/packages/radfish/Application.spec.js @@ -1,6 +1,6 @@ import { Application, IndexedDBMethod, LocalStorageMethod } from './index'; -describe ('Application', () => { +describe('Application', () => { describe('storage', () => { it('should return the storage method', () => { // IndexedDB Storage application diff --git a/packages/radfish/babel.config.js b/packages/radfish/babel.config.js new file mode 100644 index 00000000..6b53630d --- /dev/null +++ b/packages/radfish/babel.config.js @@ -0,0 +1,6 @@ +module.exports = { + presets: [ + '@babel/preset-env', + '@babel/preset-typescript', + ], +}; \ No newline at end of file diff --git a/packages/radfish/index.js b/packages/radfish/index.js index 62d8fc21..7e8ec563 100644 --- a/packages/radfish/index.js +++ b/packages/radfish/index.js @@ -1,18 +1,111 @@ -import { setupWorker } from "msw/browser"; +import { Store, Schema, LocalStorageConnector, IndexedDBConnector } from './storage'; import { StorageMethod, IndexedDBMethod, LocalStorageMethod } from "./on-device-storage/storage"; -class EventEmitter extends EventTarget {} +const registerServiceWorker = async (url) => { + if ("serviceWorker" in navigator) { + try { + const registration = await navigator.serviceWorker.register(url, { + scope: "/", + }); + if (registration.installing) { + console.log("Service worker installing"); + } else if (registration.waiting) { + console.log("Service worker installed"); + } else if (registration.active) { + console.log("Service worker active"); + } + return registration; + } catch (error) { + console.error(`Registration failed with ${error}`); + } + } +}; export class Application { constructor(options = {}) { - this.emitter = new EventEmitter(); + this.emitter = new EventTarget(); this.serviceWorker = null; this.isOnline = navigator.onLine; this._options = options; + 
this._initializationPromise = null; + // Register event listeners this._registerEventListeners(); + // Initialize everything + this._initializationPromise = this._initialize(); + } + + /** + * Initialize the application stores and collections + * @private + */ + async _initialize() { + // Initialize stores + this.stores = null; + if (this._options.stores && typeof this._options.stores === 'object') { + this.stores = {}; + + // Initialize each store and its connector + const storeInitPromises = []; + + for (let storeKey in this._options.stores) { + const store = this._options.stores[storeKey] + let name = store.name || storeKey; + let connector = store.connector; + + if (!connector) { + throw new Error(`Store ${name} is missing a connector`); + } + + // Create the store + this.stores[name] = new Store({name, connector}); + + // Initialize the connector + const initPromise = this.stores[name].connector.initialize() + .then(async () => { + // Add collections if they exist + if (store.collections) { + const collectionPromises = []; + + for (let collectionKey in store.collections) { + let collection = store.collections[collectionKey]; + let schema = collection.schema; + + // Handle schema configuration object + if (typeof schema === 'object' && !(schema instanceof Schema)) { + // If schema doesn't have a name, use the collectionKey as default + if (!schema.name) { + schema = { ...schema, name: collectionKey }; + } + schema = new Schema(schema); + } + + // Add collection (might be async for IndexedDBConnector) + const addCollectionPromise = Promise.resolve( + this.stores[name].connector.addCollection(schema) + ); + collectionPromises.push(addCollectionPromise); + } + + // Wait for all collections to be added + return Promise.all(collectionPromises); + } + }); + + storeInitPromises.push(initPromise); + } + + // Wait for all stores to be initialized + await Promise.all(storeInitPromises); + console.log(storeInitPromises[0]) + 
console.log(this.stores.weatherSurvey.connector.collections); + } + + // Dispatch the init event this._dispatch("init"); + + return true; } get storage() { @@ -20,9 +113,9 @@ export class Application { return null; } + console.warn('Deprecation: Please update to use Connectors instead of StorageMethod: https://nmfs-radfish.github.io/radfish/design-system/storage'); + if (!(this._options.storage instanceof StorageMethod)) { - console.warn('Please update the storage method to be an instance of StorageMethod'); - switch (this._options.storage?.type) { case "indexedDB": { return new IndexedDBMethod( @@ -64,7 +157,11 @@ export class Application { this._options?.mocks?.handlers, this._options?.serviceWorker?.url ); - this._dispatch("ready", { worker }); + + this.serviceWorker = worker; + + // Only dispatch ready event if worker is successfully installed or if no service worker was configured + this._dispatch("ready"); }); const handleOnline = (event) => { @@ -83,21 +180,17 @@ export class Application { async _installServiceWorker(handlers, url) { if (!url) return null; console.info("Installing service worker"); - const worker = setupWorker(...((await handlers)?.default || [])); - const onUnhandledRequest = "bypass"; - - this.serviceWorker = worker; - - worker - .start({ - onUnhandledRequest, - serviceWorker: { - url: url, - }, - }) - .then(() => { - console.debug("Service worker installed"); - }); + + try { + const registration = await registerServiceWorker(url); + + console.debug("Service worker installed and started successfully"); + // return worker; + return registration; + } catch (error) { + console.error("Failed to install service worker:", error); + return null; + } } } diff --git a/packages/radfish/jest.config.js b/packages/radfish/jest.config.js new file mode 100644 index 00000000..81281a93 --- /dev/null +++ b/packages/radfish/jest.config.js @@ -0,0 +1,6 @@ +/** @type {import('jest').Config} */ +const config = { + testEnvironment: 'jsdom', +}; + +module.exports 
= config; \ No newline at end of file diff --git a/packages/radfish/package.json b/packages/radfish/package.json index 271215c4..c7e263fc 100644 --- a/packages/radfish/package.json +++ b/packages/radfish/package.json @@ -1,9 +1,9 @@ { "name": "@nmfs-radfish/radfish", - "version": "1.0.0", + "version": "1.1.0", "main": "index.js", "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" + "test": "jest" }, "keywords": [], "author": "", @@ -13,5 +13,13 @@ "dexie": "4.0.x", "msw": "^2.3.1", "react": "^18.3.1" + }, + "devDependencies": { + "@babel/core": "^7.26.8", + "@babel/preset-env": "^7.26.8", + "@babel/preset-typescript": "^7.26.0", + "babel-jest": "^29.7.0", + "jest": "^29.7.0", + "jest-environment-jsdom": "^29.7.0" } } diff --git a/packages/radfish/storage/Collection.js b/packages/radfish/storage/Collection.js new file mode 100644 index 00000000..66bdd1ad --- /dev/null +++ b/packages/radfish/storage/Collection.js @@ -0,0 +1,243 @@ +import Schema from "./Schema"; + +class Collection extends EventTarget { + constructor(schema, connector) { + super(); + this.name = null; + this.schema = null; + this.connector = connector; + + if (schema && schema instanceof Schema) { + this.name = schema.name; + this.schema = schema; + } else if (typeof schema === 'object') { + this.name = schema.name; + this.schema = new Schema(schema); + } + } + + /** + * Create a new record in the collection + * @param {Object} data - The data to store + * @returns {Promise} - The created data with any generated fields + */ + async create(data) { + if (!this.connector) { + throw new Error('Collection is not connected to a connector'); + } + + // Get primary key field name + const primaryKeyField = this.schema._schema.primaryKey; + + // Create a copy of the data to avoid modifying the original + const newData = { ...data }; + + // Only generate a UUID if there's a primary key field defined in the schema + if (primaryKeyField && + this.schema._schema.properties[primaryKeyField] && + 
!newData[primaryKeyField] && + !this.schema._schema.properties[primaryKeyField]?.autoIncrement) { + newData[primaryKeyField] = crypto.randomUUID ? crypto.randomUUID() : Date.now().toString(36); + } + + // Validate data against schema + this.schema.validate(newData, true); + + // Check for unique field conflicts + const uniqueFields = this.schema._schema.unique || []; + if (uniqueFields.length > 0) { + const uniqueFieldsWithValues = uniqueFields.filter(field => + newData[field] !== undefined + ); + + // If setting any unique fields, check for conflicts + if (uniqueFieldsWithValues.length > 0) { + for (const field of uniqueFieldsWithValues) { + // Find any records with the same unique field value + const existingRecords = await this.find({ [field]: newData[field] }); + + if (existingRecords.length > 0) { + throw new Error(`Unique field conflict: '${field}' value '${newData[field]}' is already in use`); + } + } + } + } + + // Emit a beforeCreate event on the collection + this.dispatchEvent(new CustomEvent('beforeCreate', { + detail: { schema: this.schema, data: newData } + })); + + // Create the record using the storage engine + const result = await this.connector.engine.create(this.schema.name, newData); + + // Emit create events + this.dispatchEvent(new CustomEvent('create', { + detail: { schema: this.schema, data: result } + })); + + // Also emit on the connector for backward compatibility + this.connector.dispatchEvent(new CustomEvent('create', { + detail: { schema: this.schema, data: result } + })); + + return result; + } + + /** + * Find records in the collection matching criteria + * @param {Object} criteria - The search criteria (empty for all records) + * @returns {Promise} - The matching records + */ + async find(criteria = {}) { + if (!this.connector) { + throw new Error('Collection is not connected to a connector'); + } + + // Dispatch event through the connector + this.connector.dispatchEvent(new CustomEvent('find', { + detail: { schema: this.schema, 
criteria } + })); + + // Find records using the storage engine + return this.connector.engine.find(this.schema.name, criteria); + } + /** + * Update a record in the collection + * @param {Object} data - The data to update (must include primary key) + * @returns {Promise} - The updated record + */ + async update(data) { + if (!this.connector) { + throw new Error('Collection is not connected to a connector'); + } + + // Get the primary key field name from the schema + const primaryKeyField = this.schema._schema.primaryKey; + + // If no primary key is defined in the schema, we can't perform an update + if (!primaryKeyField) { + throw new Error('Cannot update record: no primary key defined in schema'); + } + + // Ensure primary key is present + if (!data[primaryKeyField]) { + throw new Error(`Update requires ${primaryKeyField} (primary key) to be specified`); + } + + // Prevent updating the primary key itself by making a copy without the primary key + const updateData = { ...data }; + + // Get the current record to perform a partial update + const [currentRecord] = await this.find({ [primaryKeyField]: data[primaryKeyField] }); + if (!currentRecord) { + throw new Error(`Record with ${primaryKeyField}='${data[primaryKeyField]}' not found`); + } + + // Create merged data for validation (current data + updates) + const mergedData = { ...currentRecord, ...updateData }; + + // Validate the merged data against schema + this.schema.validate(mergedData); + + // Check for unique field conflicts + const uniqueFields = this.schema._schema.unique || []; + if (uniqueFields.length > 0) { + const uniqueFieldsBeingUpdated = uniqueFields.filter(field => + updateData[field] !== undefined && + updateData[field] !== currentRecord[field] + ); + + // If updating any unique fields, check for conflicts + if (uniqueFieldsBeingUpdated.length > 0) { + for (const field of uniqueFieldsBeingUpdated) { + // Find any records with the same unique field value + const existingRecords = await this.find({ 
[field]: updateData[field] }); + + // Filter out the current record (it's ok for a record to keep its own unique value) + const conflictingRecords = existingRecords.filter(record => + record[primaryKeyField] !== data[primaryKeyField] + ); + + if (conflictingRecords.length > 0) { + throw new Error(`Unique field conflict: '${field}' value '${updateData[field]}' is already in use`); + } + } + } + } + + // Emit a beforeUpdate event on the collection + this.dispatchEvent(new CustomEvent('beforeUpdate', { + detail: { + schema: this.schema, + primaryKey: { field: primaryKeyField, value: data[primaryKeyField] }, + currentData: currentRecord, + updateData: updateData + } + })); + + // Update the record using the storage engine + // Pass the primary key value and update data separately + const updated = await this.connector.engine.update( + this.schema.name, + { + [primaryKeyField]: data[primaryKeyField], + ...updateData + } + ); + + // Emit update events + this.dispatchEvent(new CustomEvent('update', { + detail: { schema: this.schema, data: updated } + })); + + // Also emit on the connector for backward compatibility + this.connector.dispatchEvent(new CustomEvent('update', { + detail: { schema: this.schema, data: updated } + })); + + return updated; + } + + /** + * Delete records from the collection that match the criteria + * @param {Object} criteria - The criteria to match records for deletion + * @returns {Promise} - True if deletion was successful + */ + async delete(criteria = {}) { + if (!this.connector) { + throw new Error('Collection is not connected to a connector'); + } + + // Find all records matching the criteria + const recordsToDelete = await this.find(criteria); + + if (recordsToDelete.length === 0) { + // No matching records found + return true; + } + + // Dispatch event with the affected records + this.dispatchEvent(new CustomEvent('beforeDelete', { + detail: { schema: this.schema, criteria, records: recordsToDelete } + })); + + // Delete the records using 
the storage engine by passing the entire records + // The engine will handle extracting IDs or otherwise identifying the records + const result = await this.connector.engine.delete(this.schema.name, recordsToDelete); + + // Dispatch delete events + this.dispatchEvent(new CustomEvent('delete', { + detail: { schema: this.schema, criteria, records: recordsToDelete } + })); + + // Also emit on the connector for backward compatibility + this.connector.dispatchEvent(new CustomEvent('delete', { + detail: { schema: this.schema, criteria, records: recordsToDelete } + })); + + return result; + } +} + +export default Collection; \ No newline at end of file diff --git a/packages/radfish/storage/Collection.spec.js b/packages/radfish/storage/Collection.spec.js new file mode 100644 index 00000000..cfce84a6 --- /dev/null +++ b/packages/radfish/storage/Collection.spec.js @@ -0,0 +1,257 @@ +import Collection from './Collection'; +import Schema from './Schema'; + +describe('Collection', () => { + let mockSchema; + let mockConnector; + let collection; + + beforeEach(() => { + mockSchema = new Schema({ + name: 'User', + fields: { + id: { type: 'number', required: true, primaryKey: true }, + name: { type: 'string', required: true }, + email: { type: 'string', required: false } + } + }); + + mockConnector = { + engine: { + create: jest.fn().mockResolvedValue({ id: 1, name: 'Alice', email: 'alice@example.com' }), + find: jest.fn().mockResolvedValue([{ id: 1, name: 'Alice', email: 'alice@example.com' }]), + update: jest.fn().mockResolvedValue({ id: 1, name: 'Updated Alice', email: 'alice@example.com' }), + delete: jest.fn().mockResolvedValue(true) + }, + dispatchEvent: jest.fn() + }; + + collection = new Collection(mockSchema, mockConnector); + }); + + it('should create a collection with a schema instance', () => { + expect(collection.name).toBe('User'); + expect(collection.schema).toBe(mockSchema); + expect(collection.connector).toBe(mockConnector); + }); + + it('should create a 
collection with a schema-like object', () => { + const schemaObj = { + name: 'Post', + fields: { + id: { type: 'number', required: true, primaryKey: true }, + title: { type: 'string', required: true } + } + }; + const newCollection = new Collection(schemaObj, mockConnector); + expect(newCollection.name).toBe('Post'); + expect(newCollection.schema).toBeInstanceOf(Schema); + }); + + describe('create', () => { + it('should create a record via the connector engine', async () => { + // Include the required createdAt field to pass validation + const data = { id: 1, name: 'Alice', email: 'alice@example.com', createdAt: new Date() }; + + // Mock the validate method to avoid validation errors + jest.spyOn(mockSchema, 'validate').mockReturnValue({ isValid: true, errors: [] }); + + const result = await collection.create(data); + + expect(mockConnector.engine.create).toHaveBeenCalledWith('User', expect.objectContaining(data)); + expect(result).toEqual({ id: 1, name: 'Alice', email: 'alice@example.com' }); + }); + + it('should generate a UUID if primaryKey not provided', async () => { + const data = { name: 'Alice', email: 'alice@example.com', createdAt: new Date() }; + + // Mock the validate method to avoid validation errors + jest.spyOn(mockSchema, 'validate').mockReturnValue({ isValid: true, errors: [] }); + + await collection.create(data); + + expect(mockConnector.engine.create).toHaveBeenCalledWith('User', expect.objectContaining({ + name: 'Alice', + email: 'alice@example.com', + createdAt: expect.any(Date) + })); + }); + + it('should validate data against schema', async () => { + // Missing required field 'name' + const invalidData = { email: 'alice@example.com' }; + + // Don't mock validation for this test + + await expect(collection.create(invalidData)).rejects.toThrow('Validation failed'); + expect(mockConnector.engine.create).not.toHaveBeenCalled(); + }); + + it('should emit events when creating a record', async () => { + const data = { name: 'Alice', email: 
'alice@example.com', createdAt: new Date() }; + + // Mock the validate method to avoid validation errors + jest.spyOn(mockSchema, 'validate').mockReturnValue({ isValid: true, errors: [] }); + + const dispatchEventSpy = jest.spyOn(collection, 'dispatchEvent'); + + await collection.create(data); + + expect(dispatchEventSpy).toHaveBeenCalledWith(expect.objectContaining({ + type: 'beforeCreate' + })); + expect(dispatchEventSpy).toHaveBeenCalledWith(expect.objectContaining({ + type: 'create' + })); + expect(mockConnector.dispatchEvent).toHaveBeenCalledWith(expect.objectContaining({ + type: 'create' + })); + }); + }); + + describe('find', () => { + it('should find records matching criteria', async () => { + const criteria = { id: 1 }; + const result = await collection.find(criteria); + + expect(mockConnector.engine.find).toHaveBeenCalledWith('User', criteria); + expect(result).toEqual([{ id: 1, name: 'Alice', email: 'alice@example.com' }]); + }); + + it('should find all records when no criteria provided', async () => { + await collection.find(); + + expect(mockConnector.engine.find).toHaveBeenCalledWith('User', {}); + }); + + it('should emit events when finding records', async () => { + await collection.find({ id: 1 }); + + expect(mockConnector.dispatchEvent).toHaveBeenCalledWith(expect.objectContaining({ + type: 'find' + })); + }); + }); + + describe('update', () => { + beforeEach(() => { + mockConnector.engine.find.mockImplementation((tableName, criteria) => { + if (criteria && criteria.id === 1) { + return Promise.resolve([{ id: 1, name: 'Alice', email: 'alice@example.com', createdAt: new Date() }]); + } + return Promise.resolve([]); + }); + }); + + it('should update an existing record', async () => { + const data = { id: 1, name: 'Updated Alice' }; + + // Mock validation to pass + jest.spyOn(mockSchema, 'validate').mockReturnValue({ isValid: true, errors: [] }); + + const result = await collection.update(data); + + 
expect(mockConnector.engine.update).toHaveBeenCalledWith('User', data); + expect(result).toEqual({ id: 1, name: 'Updated Alice', email: 'alice@example.com' }); + }); + + it('should require primary key for update', async () => { + const data = { name: 'Updated Alice' }; + + await expect(collection.update(data)).rejects.toThrow(/requires id/); + expect(mockConnector.engine.update).not.toHaveBeenCalled(); + }); + + it('should throw error if record does not exist', async () => { + const data = { id: 999, name: 'Nonexistent' }; + + await expect(collection.update(data)).rejects.toThrow(/not found/); + expect(mockConnector.engine.update).not.toHaveBeenCalled(); + }); + + it('should validate data against schema when updating', async () => { + // Instead of testing validation failure (which is difficult without modifying the source code), + // let's test that validation was called with the right parameters + const data = { id: 1, name: 'Updated Alice' }; + const validateSpy = jest.spyOn(mockSchema, 'validate'); + + await collection.update(data); + + // Verify that validate was called with the merged data + expect(validateSpy).toHaveBeenCalledWith( + expect.objectContaining({ + id: 1, + name: 'Updated Alice', + email: 'alice@example.com', + createdAt: expect.any(Date) + }) + ); + }); + + it('should emit events when updating a record', async () => { + const data = { id: 1, name: 'Updated Alice' }; + + // Mock validation to pass + jest.spyOn(mockSchema, 'validate').mockReturnValue({ isValid: true, errors: [] }); + + const dispatchEventSpy = jest.spyOn(collection, 'dispatchEvent'); + + await collection.update(data); + + expect(dispatchEventSpy).toHaveBeenCalledWith(expect.objectContaining({ + type: 'beforeUpdate' + })); + expect(dispatchEventSpy).toHaveBeenCalledWith(expect.objectContaining({ + type: 'update' + })); + expect(mockConnector.dispatchEvent).toHaveBeenCalledWith(expect.objectContaining({ + type: 'update' + })); + }); + }); + + describe('delete', () => { + 
beforeEach(() => { + mockConnector.engine.find.mockResolvedValue([ + { id: 1, name: 'Alice', email: 'alice@example.com' } + ]); + }); + + it('should delete records matching criteria', async () => { + const criteria = { id: 1 }; + const result = await collection.delete(criteria); + + expect(mockConnector.engine.find).toHaveBeenCalledWith('User', criteria); + expect(mockConnector.engine.delete).toHaveBeenCalledWith('User', [ + { id: 1, name: 'Alice', email: 'alice@example.com' } + ]); + expect(result).toBe(true); + }); + + it('should not call engine delete if no records match criteria', async () => { + mockConnector.engine.find.mockResolvedValue([]); + + const result = await collection.delete({ id: 999 }); + + expect(mockConnector.engine.delete).not.toHaveBeenCalled(); + expect(result).toBe(true); + }); + + it('should emit events when deleting records', async () => { + const criteria = { id: 1 }; + const dispatchEventSpy = jest.spyOn(collection, 'dispatchEvent'); + + await collection.delete(criteria); + + expect(dispatchEventSpy).toHaveBeenCalledWith(expect.objectContaining({ + type: 'beforeDelete' + })); + expect(dispatchEventSpy).toHaveBeenCalledWith(expect.objectContaining({ + type: 'delete' + })); + expect(mockConnector.dispatchEvent).toHaveBeenCalledWith(expect.objectContaining({ + type: 'delete' + })); + }); + }); +}); \ No newline at end of file diff --git a/packages/radfish/storage/Connector.js b/packages/radfish/storage/Connector.js new file mode 100644 index 00000000..f742c894 --- /dev/null +++ b/packages/radfish/storage/Connector.js @@ -0,0 +1,72 @@ +import Collection from "./Collection"; +import Schema from "./Schema"; +class Connector extends EventTarget { + constructor(engine) { + super(); + this.collections = null; + if (!engine) { + throw new Error('Storage engine must be provided.'); + } + this.engine = engine; + } + + async initialize(callback) { + let error = null; + try { + await this.engine.initialize(); + this.dispatchEvent(new 
CustomEvent('init', { detail: { status: 'initialized' }})); + if (callback && typeof callback === 'function') { + callback(null, this); + } + } catch (e) { + this.dispatchEvent(new CustomEvent('error', { detail: e })); + error = e; + if (callback && typeof callback === 'function') { + callback(error, this); + } + } + return this; + } + + addCollection(schema) { + if (!this.collections) { + this.collections = {}; + } + this.collections[schema.name] = new Collection(schema, this); + } + + /** + * Create a new record in the specified schema + * @param {Object|Schema} schema - The schema object or Schema instance + * @param {Object} data - The data to store + * @returns {Promise} - The result of the create operation + */ + async create(schema, data) { + const schemaName = schema.name || schema; + return this.engine.create(schemaName, data); + } + + /** + * Find records matching criteria in the specified schema + * @param {Object|Schema} schema - The schema object or Schema instance + * @param {Object} criteria - The search criteria + * @returns {Promise} - The matching records + */ + async find(schema, criteria) { + const schemaName = schema.name || schema; + return this.engine.find(schemaName, criteria); + } + + /** + * Delete records from the specified schema + * @param {Object|Schema} schema - The schema object or Schema instance + * @param {Array} uuids - The UUIDs of records to delete + * @returns {Promise} - True if deletion was successful + */ + async delete(schema, uuids) { + const schemaName = schema.name || schema; + return this.engine.delete(schemaName, uuids); + } +} + +export default Connector; diff --git a/packages/radfish/storage/Connector.spec.js b/packages/radfish/storage/Connector.spec.js new file mode 100644 index 00000000..ed93c2ea --- /dev/null +++ b/packages/radfish/storage/Connector.spec.js @@ -0,0 +1,57 @@ +import Schema from './Schema'; +import Connector from './Connector'; +import Collection from './Collection'; + +describe('Connector', () => 
{ + let mockEngine; + let connector; + + beforeEach(() => { + mockEngine = new EventTarget(); + mockEngine.initialize = jest.fn().mockResolvedValue(); + mockEngine.create = jest.fn().mockResolvedValue(1); + mockEngine.find = jest.fn().mockResolvedValue([{ id: 1, name: 'Alice' }]); + mockEngine.delete = jest.fn().mockResolvedValue(true); + + connector = new Connector(mockEngine); + }); + + it('should initialize and emit init event', async () => { + const initCallback = jest.fn(); + connector.addEventListener('init', (event) => initCallback(event.detail)); + + await connector.initialize(initCallback); + + expect(mockEngine.initialize).toHaveBeenCalled(); + expect(initCallback).toHaveBeenCalledWith(null, connector); + }); + + it('should emit error event if initialization fails', async () => { + const errorCallback = jest.fn(); + const initCallback = jest.fn(); + connector.addEventListener('error', (event) => errorCallback(event.detail)); + + mockEngine.initialize.mockRejectedValue(new Error('Initialization failed')); + + await connector.initialize(initCallback); + + expect(errorCallback).toHaveBeenCalledWith(new Error('Initialization failed')); + expect(initCallback).toHaveBeenCalledWith(new Error('Initialization failed'), connector); + }); + + it('should add a collection to the connector', () => { + const schema = new Schema({ + name: 'User', + fields: { + id: { type: 'number', required: true, primaryKey: true }, + name: { type: 'string', required: true }, + } + }); + + connector.addCollection(schema); + + expect(connector.collections).toHaveProperty('User'); + expect(connector.collections.User).toBeInstanceOf(Collection); + expect(connector.collections.User.schema).toBe(schema); + }); +}); diff --git a/packages/radfish/storage/Engine.js b/packages/radfish/storage/Engine.js new file mode 100644 index 00000000..c2c7261d --- /dev/null +++ b/packages/radfish/storage/Engine.js @@ -0,0 +1,35 @@ +class Engine { + constructor() { + if (new.target === Engine) { + throw new 
TypeError("Cannot instantiate abstract class Engine directly."); + } + } + + async initialize() { + throw new Error("Method 'initialize()' must be implemented."); + } + + async create(tableName, data) { + throw new Error("Method 'create()' must be implemented."); + } + + async find(tableName, criteria) { + throw new Error("Method 'find()' must be implemented."); + } + + async update(tableName, data) { + throw new Error("Method 'update()' must be implemented."); + } + + /** + * Delete records from a table + * @param {string} tableName - The name of the table + * @param {Array} records - The records to delete + * @returns {Promise} - True if deletion was successful + */ + async delete(tableName, records) { + throw new Error("Method 'delete()' must be implemented."); + } +} + +export default Engine; diff --git a/packages/radfish/storage/Engine.spec.js b/packages/radfish/storage/Engine.spec.js new file mode 100644 index 00000000..92fee68d --- /dev/null +++ b/packages/radfish/storage/Engine.spec.js @@ -0,0 +1,38 @@ +import Engine from './Engine'; + +describe('Engine', () => { + it('should not allow direct instantiation', () => { + expect(() => new Engine()).toThrow("Cannot instantiate abstract class Engine directly."); + }); + + it('should throw error for initialize method', async () => { + class TestEngine extends Engine {} + const testEngine = new TestEngine(); + await expect(testEngine.initialize()).rejects.toThrow("Method 'initialize()' must be implemented."); + }); + + it('should throw error for create method', async () => { + class TestEngine extends Engine {} + const testEngine = new TestEngine(); + await expect(testEngine.create('Test', {})).rejects.toThrow("Method 'create()' must be implemented."); + }); + + it('should throw error for find method', async () => { + class TestEngine extends Engine {} + const testEngine = new TestEngine(); + await expect(testEngine.find('Test', {})).rejects.toThrow("Method 'find()' must be implemented."); + }); + + it('should 
import Dexie from "dexie";
import Connector from "./Connector";
import Engine from "./Engine";

/**
 * IndexedDBEngine - A storage engine that uses IndexedDB (via Dexie) for persistence.
 *
 * Object stores are not declared up front: initialize() opens a store-less
 * database, and each addSchema() call bumps the Dexie version, redeclares all
 * known stores, and reopens the connection.
 * @extends Engine
 */
class IndexedDBEngine extends Engine {
  /**
   * Create a new IndexedDBEngine.
   * @param {string} dbName - The name of the database
   * @param {number} version - The database version
   */
  constructor(dbName, version = 1) {
    super();
    this.dbName = dbName;
    this.version = version;
    this.db = null;
    // tableName -> { dexieSchema, primaryKey, schema }
    this.schemas = {};
  }

  /**
   * Initialize the storage engine.
   * @returns {Promise<boolean>} - true when initialization is complete
   * @throws if the database cannot be opened
   */
  async initialize() {
    try {
      this.db = new Dexie(this.dbName);
      // No stores yet; schemas are registered later via addSchema().
      this.db.version(this.version).stores({});
      await this.db.open();
      return true;
    } catch (error) {
      console.error('Failed to initialize IndexedDB:', error);
      throw error;
    }
  }

  /**
   * Resolve a Dexie table after verifying the database is open and the
   * object store exists. Shared guard for create/find/update/delete.
   * @param {string} tableName - The name of the table/collection
   * @returns {Dexie.Table} - The Dexie table handle
   * @private
   */
  _getTable(tableName) {
    if (!this.db) {
      throw new Error('Database not initialized. Call initialize() first.');
    }
    if (!this.db.tables.some((table) => table.name === tableName)) {
      throw new Error(`Table '${tableName}' is not defined in the database schema`);
    }
    return this.db.table(tableName);
  }

  /**
   * Add a schema to the engine and create/upgrade its object store.
   *
   * Dexie only allows new stores on a version bump, so this closes the current
   * connection, declares every known store on version + 1, and reopens.
   * @param {string} tableName - The name of the table to create
   * @param {Object} schema - Schema instance (reads schema._schema)
   * @returns {Promise<void>} - Resolves when the store is available
   */
  async addSchema(tableName, schema) {
    if (!this.db) {
      throw new Error('Database not initialized. Call initialize() first.');
    }

    // Build the Dexie schema string (e.g. "++id,name,&email").
    const schemaFields = [];
    const primaryKeyField = schema._schema.primaryKey;

    Object.entries(schema._schema.properties).forEach(([fieldName, fieldDef]) => {
      if (fieldName === primaryKeyField && fieldDef.autoIncrement) {
        // NOTE(review): Schema's constructor never copies autoIncrement into
        // properties, so this branch looks unreachable — confirm before relying on it.
        schemaFields.push(`++${fieldName}`);
      } else if (fieldName === primaryKeyField) {
        schemaFields.push(`&${fieldName}`); // unique primary key
      } else if (schema._schema.required && schema._schema.required.includes(fieldName)) {
        schemaFields.push(fieldName); // index required fields for lookups
      } else if (fieldDef.indexed) {
        schemaFields.push(fieldName);
      } else if (fieldDef.unique) {
        schemaFields.push(`&${fieldName}`);
      }
    });

    // Dexie needs at least one indexed field per store; fall back to the first property.
    if (schemaFields.length === 0) {
      console.warn(`No indexed fields found for schema '${tableName}'. Adding primary key field.`);
      const firstField = Object.keys(schema._schema.properties)[0];
      schemaFields.push(`&${firstField}`);
    }

    // Remember the schema so future version bumps re-declare every store.
    this.schemas[tableName] = {
      dexieSchema: schemaFields.join(','),
      primaryKey: primaryKeyField,
      schema,
    };

    try {
      // Close the current connection; a new version requires a reopen.
      this.db.close();

      const newDb = new Dexie(this.dbName);

      // Re-declare all known stores on the bumped version.
      const allSchemas = {};
      Object.entries(this.schemas).forEach(([table, { dexieSchema }]) => {
        allSchemas[table] = dexieSchema;
      });
      newDb.version(this.version + 1).stores(allSchemas);

      await newDb.open();
      this.db = newDb;
      this.version++;

      console.log(`Schema added for '${tableName}' with fields: ${schemaFields.join(',')}`);
    } catch (error) {
      console.error(`Failed to update schema for '${tableName}':`, error);
      throw error;
    }
  }

  /**
   * Create a new record, generating a primary-key value when absent.
   * @param {string} tableName - The name of the table/collection
   * @param {Object} data - The data to store
   * @returns {Promise<Object>} - The created record, including generated fields
   */
  async create(tableName, data) {
    try {
      const table = this._getTable(tableName);

      const newData = { ...data };
      const primaryKey = this.schemas[tableName]?.primaryKey || null;

      // Auto-generate a primary key when the caller did not supply one.
      if (primaryKey && !newData[primaryKey]) {
        newData[primaryKey] = crypto.randomUUID ? crypto.randomUUID() : Date.now().toString(36);
      }

      const id = await table.add(newData);

      // If Dexie produced the key (auto-increment), fetch the complete record.
      if (typeof id === 'number' || typeof id === 'string') {
        return await table.get(id);
      }
      return newData;
    } catch (error) {
      console.error(`Failed to create record in ${tableName}:`, error);
      throw error;
    }
  }

  /**
   * Find records matching criteria.
   * @param {string} tableName - The name of the table/collection
   * @param {Object} criteria - Equality criteria (empty object returns all records)
   * @returns {Promise<Array>} - The matching records
   */
  async find(tableName, criteria = {}) {
    try {
      const table = this._getTable(tableName);
      let collection = table.toCollection();

      // Apply an in-memory equality filter when criteria are provided.
      if (Object.keys(criteria).length > 0) {
        collection = collection.filter((item) =>
          Object.entries(criteria).every(([key, value]) => item[key] === value)
        );
      }

      return await collection.toArray();
    } catch (error) {
      console.error(`Failed to find records in ${tableName}:`, error);
      throw error;
    }
  }

  /**
   * Update the record identified by the schema's primary key.
   * @param {string} tableName - The name of the table/collection
   * @param {Object} data - The fields to merge; must include the primary key
   * @returns {Promise<Object>} - The updated record
   */
  async update(tableName, data) {
    try {
      const table = this._getTable(tableName);

      const schemaInfo = this.schemas[tableName];
      const primaryKeyField = schemaInfo?.primaryKey || null;

      if (!primaryKeyField) {
        throw new Error(`Cannot update record in ${tableName}: No primary key defined in schema. IndexedDB requires a primary key for updates.`);
      }
      if (!data[primaryKeyField]) {
        throw new Error(`Update operation requires a '${primaryKeyField}' field (primary key)`);
      }

      const existingRecord = await table.get(data[primaryKeyField]);
      if (!existingRecord) {
        throw new Error(`Record with ${primaryKeyField}='${data[primaryKeyField]}' not found in table '${tableName}'`);
      }

      // Shallow-merge new fields over the stored record.
      const updatedRecord = { ...existingRecord, ...data };
      await table.put(updatedRecord);
      return updatedRecord;
    } catch (error) {
      console.error(`Failed to update record in ${tableName}:`, error);
      throw error;
    }
  }

  /**
   * Delete records from the collection by primary key.
   * @param {string} tableName - The name of the table/collection
   * @param {Array} records - Records whose primary-key values identify what to delete
   * @returns {Promise<boolean>} - true when deletion completes
   */
  async delete(tableName, records) {
    try {
      const table = this._getTable(tableName);

      const schemaInfo = this.schemas[tableName];
      const primaryKeyField = schemaInfo?.primaryKey || null;
      if (!primaryKeyField) {
        throw new Error(`Cannot delete records from ${tableName}: No primary key defined in schema. IndexedDB requires a primary key for deletion.`);
      }

      // Collect usable key values; records without one are skipped.
      const idsToDelete = records.map((record) => record[primaryKeyField]).filter(Boolean);
      if (idsToDelete.length === 0) {
        console.warn(`No valid IDs found in records for deletion from ${tableName}`);
        return true;
      }

      await table.bulkDelete(idsToDelete);
      return true;
    } catch (error) {
      console.error(`Failed to delete records from ${tableName}:`, error);
      throw error;
    }
  }
}

/**
 * IndexedDBConnector - A connector that uses IndexedDB for persistence.
 * @extends Connector
 */
class IndexedDBConnector extends Connector {
  /**
   * Create a new IndexedDBConnector.
   * @param {string} dbName - The name of the database
   * @param {number} version - The database version number
   */
  constructor(dbName, version = 1) {
    super(new IndexedDBEngine(dbName, version));
    this.dbName = dbName;
    this.version = version;
  }

  /**
   * Add a collection, creating its object store before registering it.
   * NOTE(review): assumes the base Connector.addCollection does not itself call
   * engine.addSchema again — confirm against Connector.js.
   * @param {Schema} schema - The schema to add
   * @returns {Promise<Collection>} - The created collection
   */
  async addCollection(schema) {
    // addSchema is async here (version bump + reopen), so await it first.
    await this.engine.addSchema(schema.name, schema);

    super.addCollection(schema);

    return this.collections[schema.name];
  }
}

export default IndexedDBConnector;
import Connector from "./Connector";
import Engine from "./Engine";

/**
 * LocalStorageEngine - persists each table as a JSON-encoded array under a
 * single namespaced localStorage key (`<namespace>:<tableName>`).
 * @extends Engine
 */
class LocalStorageEngine extends Engine {
  /**
   * Create a new LocalStorageEngine.
   * @param {string} namespace - The namespace to use for localStorage keys
   */
  constructor(namespace) {
    super();
    this.namespace = namespace;
  }

  /**
   * Initialize the storage engine. localStorage needs no setup.
   * @returns {Promise<boolean>} - true when initialization is complete
   */
  async initialize() {
    return true;
  }

  /**
   * Build the localStorage key for a table.
   * @param {string} tableName - The table name
   * @returns {string} - The namespaced key
   * @private
   */
  _storageKey(tableName) {
    return `${this.namespace}:${tableName}`;
  }

  /**
   * Read every record stored for a table.
   * @param {string} tableName - The table name
   * @returns {?Array} - The parsed records, or null when the key is absent
   * @private
   */
  _load(tableName) {
    const raw = localStorage.getItem(this._storageKey(tableName));
    return raw ? JSON.parse(raw) : null;
  }

  /**
   * Persist the full record list for a table.
   * @param {string} tableName - The table name
   * @param {Array} records - The records to serialize
   * @private
   */
  _save(tableName, records) {
    localStorage.setItem(this._storageKey(tableName), JSON.stringify(records));
  }

  /**
   * Create a new record, assigning a generated id when none is supplied.
   * @param {string} tableName - The name of the table/collection
   * @param {Object} data - The data to store
   * @returns {Promise<Object>} - The created record including its id
   */
  async create(tableName, data) {
    const generatedId = crypto.randomUUID ? crypto.randomUUID() : Date.now().toString(36);
    const records = this._load(tableName) ?? [];

    const newRecord = { ...data, id: data.id || generatedId };

    records.push(newRecord);
    this._save(tableName, records);

    return newRecord;
  }

  /**
   * Find records matching equality criteria.
   * @param {string} tableName - The name of the table/collection
   * @param {Object} criteria - The search criteria (empty for all records)
   * @returns {Promise<Array>} - The matching records
   */
  async find(tableName, criteria = {}) {
    const records = this._load(tableName);
    if (records === null) return [];

    const filters = Object.entries(criteria);
    if (filters.length === 0) {
      return records;
    }

    return records.filter((record) =>
      filters.every(([key, value]) => record[key] === value)
    );
  }

  /**
   * Update the record whose id matches data.id.
   * @param {string} tableName - The name of the table/collection
   * @param {Object} data - The fields to merge (must include id)
   * @returns {Promise<Object>} - The updated record
   * @throws when id is missing, the table is empty, or no record matches
   */
  async update(tableName, data) {
    if (!data.id) {
      throw new Error("Update operation requires an 'id' field");
    }

    const records = this._load(tableName);
    if (records === null) {
      throw new Error(`No records found for table: ${tableName}`);
    }

    const index = records.findIndex((record) => record.id === data.id);
    if (index === -1) {
      throw new Error(`Record with id '${data.id}' not found in table '${tableName}'`);
    }

    const updatedRecord = { ...records[index], ...data };
    records[index] = updatedRecord;
    this._save(tableName, records);

    return updatedRecord;
  }

  /**
   * Delete records from the collection by id.
   * @param {string} tableName - The name of the table/collection
   * @param {Array} records - Array of records to delete (matched on id)
   * @returns {Promise<boolean>} - true when deletion completes
   */
  async delete(tableName, records) {
    const existing = this._load(tableName);
    if (existing === null) {
      return true; // nothing stored for this table
    }

    const idsToDelete = records.map((record) => record.id).filter(Boolean);
    if (idsToDelete.length === 0) {
      return true; // no identifiable records to remove
    }

    const remaining = existing.filter((record) => !idsToDelete.includes(record.id));
    this._save(tableName, remaining);

    return true;
  }
}

/**
 * LocalStorageConnector - A connector that uses localStorage for persistence.
 * @extends Connector
 */
class LocalStorageConnector extends Connector {
  /**
   * Create a new LocalStorageConnector.
   * @param {string} namespace - The namespace to use for localStorage keys
   */
  constructor(namespace) {
    super(new LocalStorageEngine(namespace));
    this.namespace = namespace;
  }
}

export default LocalStorageConnector;
// Accepted temporal formats (ISO 8601 subsets).
// Fixed: the fractional-seconds dot was previously an unescaped `.` wildcard.
const ISO_8601_DATE_TIME_REGEX = /^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,3})?Z?)$/;
const ISO_8601_DATE_REGEX = /^\d{4}-\d{2}-\d{2}$/;
const TIME_REGEX = /^([0-1]\d|2[0-3]):([0-5]\d):([0-5]\d)$/;

/**
 * Error thrown by Schema.validate() in strict mode. Carries the full list of
 * per-field failures so callers can report every problem at once.
 */
class ValidationError extends Error {
  constructor(errors) {
    super("Validation failed");
    this.name = "ValidationError";
    this.errors = errors; // Array<{ field: ?string, error: string }>
  }
}

/**
 * Schema - a json-schema-like field definition with built-in validation.
 *
 * Field definitions may include: type (required), required, unique,
 * primaryKey, minLength, pattern (a RegExp), and for numbers:
 * minimum/maximum/exclusiveMinimum/exclusiveMaximum.
 */
class Schema {
  /**
   * @param {Object} options
   * @param {string} options.name - Schema/collection name.
   * @param {Object} options.fields - Map of field name -> definition.
   * @throws {Error} when name or fields are missing or malformed.
   */
  constructor(options) {
    const { name, fields } = options;
    if (!name || typeof name !== 'string') {
      throw new Error('Schema name must be a non-empty string.');
    }
    if (!fields || typeof fields !== 'object') {
      throw new Error('Schema rules must be an object.');
    }
    this.name = name;
    // Follow json-schema specification
    this._schema = {
      title: this.name,
      type: 'object',
      properties: {},
      required: [],
      unique: [],
    };
    for (const [key, definition] of Object.entries(fields)) {
      if (!definition.type || typeof definition.type !== 'string') {
        // Fixed: message previously began with a stray '"' character.
        throw new Error(`Field "${key}" must have a type.`);
      }
      // Every type string (including timestamp/date/time/datetime-local) is
      // stored verbatim; the old switch duplicated the default for each case.
      this._schema.properties[key] = { type: definition.type };
      if (definition.required) {
        this._schema.required.push(key);
      }
      if (definition.unique) {
        this._schema.unique.push(key);
      }
      if (definition.minLength) {
        this._schema.properties[key].minLength = definition.minLength;
      }
      if (definition.pattern) {
        // Stored as the regex source string, per json-schema convention.
        this._schema.properties[key].pattern = definition.pattern.source;
      }
      // Handle numerical constraints
      if (definition.type === 'number') {
        if (definition.minimum !== undefined) {
          this._schema.properties[key].minimum = definition.minimum;
        }
        if (definition.maximum !== undefined) {
          this._schema.properties[key].maximum = definition.maximum;
        }
        if (definition.exclusiveMinimum !== undefined) {
          this._schema.properties[key].exclusiveMinimum = definition.exclusiveMinimum;
        }
        if (definition.exclusiveMaximum !== undefined) {
          this._schema.properties[key].exclusiveMaximum = definition.exclusiveMaximum;
        }
      }
      if (definition.primaryKey) {
        this._schema.primaryKey = key;
      }
    }
  }

  /**
   * Validates input data against the schema.
   * @param {Object} data - The data to validate.
   * @param {boolean} [strictMode=false] - Whether to throw on validation errors.
   * @returns {Object} Validation result { isValid: boolean, errors: Array }
   * @throws {ValidationError} in strict mode when validation fails.
   */
  validate(data, strictMode = false) {
    const errors = [];

    if (!data || typeof data !== 'object') {
      errors.push({ field: null, error: 'Invalid data type' });
    } else {
      for (const [key, definition] of Object.entries(this._schema.properties)) {
        const value = data[key];

        // Required field check
        const isRequired = this._schema.required.includes(key);
        if (isRequired && (value === undefined || value === null)) {
          errors.push({ field: key, error: 'Field is required' });
          continue; // Skip further checks for this field
        }

        // Type validation (optional fields are only checked when present)
        if (value !== undefined) {
          switch (definition.type) {
            case 'timestamp': // e.g. 2025-02-14T14:23:00.000Z
            case 'datetime-local':
              if (!(value instanceof Date) && !(typeof value === 'string' && ISO_8601_DATE_TIME_REGEX.test(value))) {
                errors.push({ field: key, error: 'Expected type Date or valid date-time string' });
              }
              break;
            case 'date': // e.g. 2025-02-14
              if (!(value instanceof Date) && !(typeof value === 'string' && ISO_8601_DATE_REGEX.test(value))) {
                errors.push({ field: key, error: 'Expected type Date or valid date string' });
              }
              break;
            case 'time': // e.g. 14:21:05
              if (typeof value !== 'string' || !TIME_REGEX.test(value)) {
                errors.push({ field: key, error: 'Expected type string in format HH:MM:SS' });
              }
              break;
            case 'number':
              if (typeof value !== 'number') {
                errors.push({ field: key, error: `Expected type number, got ${typeof value}` });
              } else {
                if (definition.minimum !== undefined && value < definition.minimum) {
                  errors.push({ field: key, error: `Must be >= ${definition.minimum}` });
                }
                if (definition.maximum !== undefined && value > definition.maximum) {
                  errors.push({ field: key, error: `Must be <= ${definition.maximum}` });
                }
                if (definition.exclusiveMinimum !== undefined && value <= definition.exclusiveMinimum) {
                  errors.push({ field: key, error: `Must be > ${definition.exclusiveMinimum}` });
                }
                if (definition.exclusiveMaximum !== undefined && value >= definition.exclusiveMaximum) {
                  errors.push({ field: key, error: `Must be < ${definition.exclusiveMaximum}` });
                }
              }
              break;
            default:
              if (typeof value !== definition.type) {
                errors.push({ field: key, error: `Expected type ${definition.type}, got ${typeof value}` });
              }
          }
        }

        // Additional constraints for string types
        if (definition.minLength && typeof value === 'string' && value.length < definition.minLength) {
          errors.push({ field: key, error: `Must be at least ${definition.minLength} characters` });
        }

        // Fixed: this previously read `definition.regex`, which is never set
        // (the constructor stores `pattern`), so pattern constraints were
        // silently ignored.
        if (definition.pattern && typeof value === 'string' && !new RegExp(definition.pattern).test(value)) {
          errors.push({ field: key, error: 'Invalid format' });
        }
      }
    }

    if (strictMode && errors.length > 0) {
      throw new ValidationError(errors);
    }

    return { isValid: errors.length === 0, errors };
  }
}

export default Schema;
b/packages/radfish/storage/Schema.spec.js new file mode 100644 index 00000000..41d7a5bf --- /dev/null +++ b/packages/radfish/storage/Schema.spec.js @@ -0,0 +1,184 @@ +import Schema from './Schema'; + +describe('Schema', () => { + let userSchema; + + beforeEach(() => { + userSchema = new Schema({ + name: 'User', + fields: { + id: { type: 'number', required: true, primaryKey: true }, + name: { type: 'string', required: true }, + email: { type: 'string', required: false }, + // Example of adding a field with constraints + address: { type: 'string', required: false, minLength: 5, pattern: /^[A-Za-z\s]+$/ }, + createdAt: { type: 'timestamp', required: true }, + birthDate: { type: 'date', required: false }, + appointmentTime: { type: 'time', required: false }, + lastLogin: { type: 'datetime-local', required: false }, + age: { type: 'number', required: false, minimum: 0, maximum: 120 }, + score: { type: 'number', required: false, exclusiveMinimum: 0 } + } + }); + }); + + it('should create a schema with valid name and rules', () => { + expect(userSchema.name).toBe('User'); + expect(userSchema._schema).toHaveProperty('title', 'User'); + expect(userSchema._schema).toHaveProperty('type', 'object'); + expect(userSchema._schema.properties).toHaveProperty('id'); + expect(userSchema._schema.properties).toHaveProperty('name'); + expect(userSchema._schema.properties).toHaveProperty('email'); + expect(userSchema._schema.properties).toHaveProperty('address'); + expect(userSchema._schema.properties).toHaveProperty('createdAt'); + expect(userSchema._schema.required).toContain('id'); + expect(userSchema._schema.required).toContain('name'); + expect(userSchema._schema.required).not.toContain('email'); + expect(userSchema._schema.required).not.toContain('address'); + }); + + it('should throw an error if name is not provided', () => { + expect(() => new Schema({ name: '', fields: {} })).toThrow('Schema name must be a non-empty string.'); + }); + + it('should throw an error if rules is not 
an object', () => { + expect(() => new Schema({ name: 'User', fields: null })).toThrow('Schema rules must be an object.'); + }); + + it('should throw an error if a rule is missing a type', () => { + expect(() => new Schema({ name: 'User', fields: { id: { required: true } } })).toThrow('Field "id" must have a type.'); + }); + + describe('validate', () => { + it('should return true for valid data', () => { + const validData = { id: 1, name: 'John Doe', email: 'john@example.com', createdAt: new Date() }; + expect(userSchema.validate(validData).isValid).toBe(true); + }); + + it('should return false if required fields are missing', () => { + const invalidData = { name: 'John Doe' }; + expect(userSchema.validate(invalidData).isValid).toBe(false); + }); + + it('should return false if field types do not match', () => { + const invalidData = { id: 'one', name: 'John Doe', email: 'john@example.com' }; + expect(userSchema.validate(invalidData).isValid).toBe(false); + }); + + it('should return true if optional fields are missing', () => { + const validData = { id: 1, name: 'John Doe', createdAt: new Date() }; + expect(userSchema.validate(validData).isValid).toBe(true); + }); + + it('should return false if data is not an object', () => { + expect(userSchema.validate(null).isValid).toBe(false); + expect(userSchema.validate('string').isValid).toBe(false); + expect(userSchema.validate(123).isValid).toBe(false); + }); + + describe('_schema', () => { + it('should correctly define the JSON schema properties', () => { + expect(userSchema._schema.properties.id).toEqual({ type: 'number' }); + expect(userSchema._schema.properties.name).toEqual({ type: 'string' }); + expect(userSchema._schema.properties.email).toEqual({ type: 'string' }); + expect(userSchema._schema.properties.address).toEqual({ + type: 'string', + minLength: 5, + pattern: '^[A-Za-z\\s]+$' + }); + expect(userSchema._schema.properties.createdAt).toEqual({ + type: 'timestamp', + }); + 
expect(userSchema._schema.properties.birthDate).toEqual({ + type: 'date', + }); + expect(userSchema._schema.properties.appointmentTime).toEqual({ + type: 'time', + }); + expect(userSchema._schema.properties.lastLogin).toEqual({ + type: 'datetime-local', + }); + expect(userSchema._schema.properties.age).toEqual({ + type: 'number', + minimum: 0, + maximum: 120 + }); + expect(userSchema._schema.properties.score).toEqual({ + type: 'number', + exclusiveMinimum: 0 + }); + }); + + it('should include required fields in the JSON schema', () => { + expect(userSchema._schema.required).toEqual(['id', 'name', 'createdAt']); + }); + }); + + describe('numerical constraints', () => { + it('should validate minimum and maximum for age', () => { + const validData = { id: 5, name: 'Charlie', createdAt: new Date(), age: 30 }; + expect(userSchema.validate(validData).isValid).toBe(true); + + const belowMinimum = { id: 5, name: 'Charlie', createdAt: new Date(), age: -1 }; + expect(userSchema.validate(belowMinimum).isValid).toBe(false); + + const aboveMaximum = { id: 5, name: 'Charlie', createdAt: new Date(), age: 150 }; + expect(userSchema.validate(aboveMaximum).isValid).toBe(false); + }); + + it('should validate exclusiveMinimum for score', () => { + const validData = { id: 6, name: 'Dave', createdAt: new Date(), score: 10 }; + expect(userSchema.validate(validData).isValid).toBe(true); + + const atExclusiveMinimum = { id: 6, name: 'Dave', createdAt: new Date(), score: 0 }; + expect(userSchema.validate(atExclusiveMinimum).isValid).toBe(false); + + const belowExclusiveMinimum = { id: 6, name: 'Dave', createdAt: new Date(), score: -5 }; + expect(userSchema.validate(belowExclusiveMinimum).isValid).toBe(false); + }); + }); + + it('should validate that createdAt is a Date instance or valid string', () => { + const validData = { id: 1, name: 'John Doe', createdAt: new Date() }; + expect(userSchema.validate(validData).isValid).toBe(true); + + const validDataString = { id: 8, name: 'Frank', 
createdAt: '2025-02-14T14:23:00Z' }; + expect(userSchema.validate(validDataString).isValid).toBe(true); + + const invalidData = { id: 1, name: 'John Doe', createdAt: 'invalid-date-string' }; + expect(userSchema.validate(invalidData).isValid).toBe(false); + }); + + it('should validate that birthDate is a Date instance or valid string if provided', () => { + const validData = { id: 2, name: 'Jane Doe', createdAt: new Date(), birthDate: new Date('1992-02-02') }; + expect(userSchema.validate(validData).isValid).toBe(true); + + const validDataString = { id: 9, name: 'Grace', createdAt: new Date(), birthDate: '1992-02-02' }; + expect(userSchema.validate(validDataString).isValid).toBe(true); + + // Only testing invalid format, should be YYYY-MM-DD. + // This can also be incorrect because February doesn't have 30 days. + const invalidData = { id: 2, name: 'Jane Doe', createdAt: new Date(), birthDate: '1992/02/30' }; + expect(userSchema.validate(invalidData).isValid).toBe(false); + }); + + it('should validate that appointmentTime is a string in HH:MM:SS format if provided', () => { + const validData = { id: 3, name: 'Alice', createdAt: new Date(), appointmentTime: '09:15:30' }; + expect(userSchema.validate(validData).isValid).toBe(true); + + const invalidData = { id: 3, name: 'Alice', createdAt: new Date(), appointmentTime: '9:15' }; + expect(userSchema.validate(invalidData).isValid).toBe(false); + }); + + it('should validate that lastLogin is a Date instance or valid string if provided', () => { + const validData = { id: 4, name: 'Bob', createdAt: new Date(), lastLogin: new Date() }; + expect(userSchema.validate(validData).isValid).toBe(true); + + const validDataString = { id: 10, name: 'Hannah', createdAt: new Date(), lastLogin: '2023-10-10T10:00:00Z' }; + expect(userSchema.validate(validDataString).isValid).toBe(true); + + const invalidData = { id: 4, name: 'Bob', createdAt: new Date(), lastLogin: 'invalid-date-time-string' }; + 
expect(userSchema.validate(invalidData).isValid).toBe(false); + }); + }); +}); diff --git a/packages/radfish/storage/Store.js b/packages/radfish/storage/Store.js new file mode 100644 index 00000000..d72bb003 --- /dev/null +++ b/packages/radfish/storage/Store.js @@ -0,0 +1,30 @@ +import Connector from "./Connector"; + +class Store { + constructor ({ name, connector }) { + this.name = name; + + // Instantiate store connector + this.connector = null; + + if (connector && connector instanceof Connector) { + this.connector = connector; + } + } + + async open() { + // TODO: Use engine + console.warn('Store.open() is not implemented'); + } + + async close() { + // TODO: Use engine + console.warn('Store.close() is not implemented'); + } + + getCollection(name) { + return this.connector.collections[name]; + } +} + +export default Store; \ No newline at end of file diff --git a/packages/radfish/storage/Store.spec.js b/packages/radfish/storage/Store.spec.js new file mode 100644 index 00000000..3754c325 --- /dev/null +++ b/packages/radfish/storage/Store.spec.js @@ -0,0 +1,59 @@ +import Store from './Store'; +import Schema from './Schema'; +import Collection from './Collection'; +import Connector from './Connector'; + +describe('Store', () => { + let store; + let mockConnector; + + beforeEach(() => { + // Create a mock connector + mockConnector = new Connector({ + initialize: jest.fn().mockResolvedValue(undefined) + }); + + // Mock collections property + mockConnector.collections = { + User: { name: 'User' } + }; + + // Create a store with the mock connector + store = new Store({ + name: 'TestStore', + connector: mockConnector + }); + }); + + it('should create a store with a connector', () => { + expect(store.name).toBe('TestStore'); + expect(store.connector).toBe(mockConnector); + }); + + describe('getCollection', () => { + it('should return a collection by name', () => { + const collection = store.getCollection('User'); + expect(collection).toEqual({ name: 'User' }); + }); + 
}); + + describe('open', () => { + it('should log a warning', async () => { + console.warn = jest.fn(); + + await store.open(); + + expect(console.warn).toHaveBeenCalledWith('Store.open() is not implemented'); + }); + }); + + describe('close', () => { + it('should log a warning', async () => { + console.warn = jest.fn(); + + await store.close(); + + expect(console.warn).toHaveBeenCalledWith('Store.close() is not implemented'); + }); + }); +}); \ No newline at end of file diff --git a/packages/radfish/storage/index.js b/packages/radfish/storage/index.js new file mode 100644 index 00000000..1ee0ff90 --- /dev/null +++ b/packages/radfish/storage/index.js @@ -0,0 +1,5 @@ +export { default as Schema } from "./Schema"; +export { default as Connector } from "./Connector"; +export { default as Store } from "./Store"; +export { default as LocalStorageConnector } from "./LocalStorageConnector"; +export { default as IndexedDBConnector } from "./IndexedDBConnector"; \ No newline at end of file diff --git a/templates/react-javascript/package-lock.json b/templates/react-javascript/package-lock.json index 5c6c3ff8..136aef1b 100644 --- a/templates/react-javascript/package-lock.json +++ b/templates/react-javascript/package-lock.json @@ -7,7 +7,7 @@ "": { "version": "0.11.1", "dependencies": { - "@nmfs-radfish/radfish": "^1.0.0", + "@nmfs-radfish/radfish": "^1.1.0", "@nmfs-radfish/react-radfish": "^1.0.0", "@testing-library/user-event": "^14.5.2", "@trussworks/react-uswds": "^9.0.0", @@ -2806,9 +2806,9 @@ } }, "node_modules/@nmfs-radfish/radfish": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@nmfs-radfish/radfish/-/radfish-1.0.0.tgz", - "integrity": "sha512-Mqz2FDAYg8WiIPFftcA4Ch+VVfcyyLz91Gk0nIB966wsWfo36+z5x66klyu1BwsgS/OfMlsxPI4dSpQ3wQ1RIA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@nmfs-radfish/radfish/-/radfish-1.1.0.tgz", + "integrity": 
"sha512-U4eW4yui4kkb6mV0ImRik3DMPzn90UxVSeEInhen+AFILP5eRhumwlUYvyAhA5pnbOS7zD15azWehiATsw+baw==", "dependencies": { "dexie": "4.0.x", "msw": "^2.3.1", @@ -5101,9 +5101,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001667", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001667.tgz", - "integrity": "sha512-7LTwJjcRkzKFmtqGsibMeuXmvFDfZq/nzIjnmgCGzKKRVzjD72selLDK1oPF/Oxzmt4fNcPvTDvGqSDG4tCALw==", + "version": "1.0.30001718", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001718.tgz", + "integrity": "sha512-AflseV1ahcSunK53NfEs9gFWgOEmzr0f+kaMFA4xiLZlr9Hzt7HxcSpIFcnNCUkz6R6dWKa54rUz3HUmI3nVcw==", "dev": true, "funding": [ { diff --git a/templates/react-javascript/package.json b/templates/react-javascript/package.json index 182c84c6..c254dec2 100644 --- a/templates/react-javascript/package.json +++ b/templates/react-javascript/package.json @@ -1,7 +1,7 @@ { "version": "0.11.1", "dependencies": { - "@nmfs-radfish/radfish": "^1.0.0", + "@nmfs-radfish/radfish": "^1.1.0", "@nmfs-radfish/react-radfish": "^1.0.0", "@testing-library/user-event": "^14.5.2", "@trussworks/react-uswds": "^9.0.0", diff --git a/templates/react-javascript/public/mockServiceWorker.js b/templates/react-javascript/public/mockServiceWorker.js deleted file mode 100644 index 89bce291..00000000 --- a/templates/react-javascript/public/mockServiceWorker.js +++ /dev/null @@ -1,295 +0,0 @@ -/* eslint-disable */ -/* tslint:disable */ - -/** - * Mock Service Worker. - * @see https://github.com/mswjs/msw - * - Please do NOT modify this file. - * - Please do NOT serve this file on production. 
- */ - -const PACKAGE_VERSION = '2.6.5' -const INTEGRITY_CHECKSUM = 'ca7800994cc8bfb5eb961e037c877074' -const IS_MOCKED_RESPONSE = Symbol('isMockedResponse') -const activeClientIds = new Set() - -self.addEventListener('install', function () { - self.skipWaiting() -}) - -self.addEventListener('activate', function (event) { - event.waitUntil(self.clients.claim()) -}) - -self.addEventListener('message', async function (event) { - const clientId = event.source.id - - if (!clientId || !self.clients) { - return - } - - const client = await self.clients.get(clientId) - - if (!client) { - return - } - - const allClients = await self.clients.matchAll({ - type: 'window', - }) - - switch (event.data) { - case 'KEEPALIVE_REQUEST': { - sendToClient(client, { - type: 'KEEPALIVE_RESPONSE', - }) - break - } - - case 'INTEGRITY_CHECK_REQUEST': { - sendToClient(client, { - type: 'INTEGRITY_CHECK_RESPONSE', - payload: { - packageVersion: PACKAGE_VERSION, - checksum: INTEGRITY_CHECKSUM, - }, - }) - break - } - - case 'MOCK_ACTIVATE': { - activeClientIds.add(clientId) - - sendToClient(client, { - type: 'MOCKING_ENABLED', - payload: { - client: { - id: client.id, - frameType: client.frameType, - }, - }, - }) - break - } - - case 'MOCK_DEACTIVATE': { - activeClientIds.delete(clientId) - break - } - - case 'CLIENT_CLOSED': { - activeClientIds.delete(clientId) - - const remainingClients = allClients.filter((client) => { - return client.id !== clientId - }) - - // Unregister itself when there are no more clients - if (remainingClients.length === 0) { - self.registration.unregister() - } - - break - } - } -}) - -self.addEventListener('fetch', function (event) { - const { request } = event - - // Bypass navigation requests. - if (request.mode === 'navigate') { - return - } - - // Opening the DevTools triggers the "only-if-cached" request - // that cannot be handled by the worker. Bypass such requests. 
- if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { - return - } - - // Bypass all requests when there are no active clients. - // Prevents the self-unregistered worked from handling requests - // after it's been deleted (still remains active until the next reload). - if (activeClientIds.size === 0) { - return - } - - // Generate unique request ID. - const requestId = crypto.randomUUID() - event.respondWith(handleRequest(event, requestId)) -}) - -async function handleRequest(event, requestId) { - const client = await resolveMainClient(event) - const response = await getResponse(event, client, requestId) - - // Send back the response clone for the "response:*" life-cycle events. - // Ensure MSW is active and ready to handle the message, otherwise - // this message will pend indefinitely. - if (client && activeClientIds.has(client.id)) { - ;(async function () { - const responseClone = response.clone() - - sendToClient( - client, - { - type: 'RESPONSE', - payload: { - requestId, - isMockedResponse: IS_MOCKED_RESPONSE in response, - type: responseClone.type, - status: responseClone.status, - statusText: responseClone.statusText, - body: responseClone.body, - headers: Object.fromEntries(responseClone.headers.entries()), - }, - }, - [responseClone.body], - ) - })() - } - - return response -} - -// Resolve the main client for the given event. -// Client that issues a request doesn't necessarily equal the client -// that registered the worker. It's with the latter the worker should -// communicate with during the response resolving phase. -async function resolveMainClient(event) { - const client = await self.clients.get(event.clientId) - - if (activeClientIds.has(event.clientId)) { - return client - } - - if (client?.frameType === 'top-level') { - return client - } - - const allClients = await self.clients.matchAll({ - type: 'window', - }) - - return allClients - .filter((client) => { - // Get only those clients that are currently visible. 
- return client.visibilityState === 'visible' - }) - .find((client) => { - // Find the client ID that's recorded in the - // set of clients that have registered the worker. - return activeClientIds.has(client.id) - }) -} - -async function getResponse(event, client, requestId) { - const { request } = event - - // Clone the request because it might've been already used - // (i.e. its body has been read and sent to the client). - const requestClone = request.clone() - - function passthrough() { - // Cast the request headers to a new Headers instance - // so the headers can be manipulated with. - const headers = new Headers(requestClone.headers) - - // Remove the "accept" header value that marked this request as passthrough. - // This prevents request alteration and also keeps it compliant with the - // user-defined CORS policies. - headers.delete('accept', 'msw/passthrough') - - return fetch(requestClone, { headers }) - } - - // Bypass mocking when the client is not active. - if (!client) { - return passthrough() - } - - // Bypass initial page load requests (i.e. static assets). - // The absence of the immediate/parent client in the map of the active clients - // means that MSW hasn't dispatched the "MOCK_ACTIVATE" event yet - // and is not ready to handle requests. - if (!activeClientIds.has(client.id)) { - return passthrough() - } - - // Notify the client that a request has been intercepted. 
- const requestBuffer = await request.arrayBuffer() - const clientMessage = await sendToClient( - client, - { - type: 'REQUEST', - payload: { - id: requestId, - url: request.url, - mode: request.mode, - method: request.method, - headers: Object.fromEntries(request.headers.entries()), - cache: request.cache, - credentials: request.credentials, - destination: request.destination, - integrity: request.integrity, - redirect: request.redirect, - referrer: request.referrer, - referrerPolicy: request.referrerPolicy, - body: requestBuffer, - keepalive: request.keepalive, - }, - }, - [requestBuffer], - ) - - switch (clientMessage.type) { - case 'MOCK_RESPONSE': { - return respondWithMock(clientMessage.data) - } - - case 'PASSTHROUGH': { - return passthrough() - } - } - - return passthrough() -} - -function sendToClient(client, message, transferrables = []) { - return new Promise((resolve, reject) => { - const channel = new MessageChannel() - - channel.port1.onmessage = (event) => { - if (event.data && event.data.error) { - return reject(event.data.error) - } - - resolve(event.data) - } - - client.postMessage( - message, - [channel.port2].concat(transferrables.filter(Boolean)), - ) - }) -} - -async function respondWithMock(response) { - // Setting response status code to 0 is a no-op. - // However, when responding with a "Response.error()", the produced Response - // instance will have status code set to 0. Since it's not possible to create - // a Response instance with status code 0, handle that use-case separately. 
- if (response.status === 0) { - return Response.error() - } - - const mockedResponse = new Response(response.body, response) - - Reflect.defineProperty(mockedResponse, IS_MOCKED_RESPONSE, { - value: true, - enumerable: true, - }) - - return mockedResponse -} diff --git a/templates/react-javascript/src/index.jsx b/templates/react-javascript/src/index.jsx index 71e429fa..5ed01d1f 100644 --- a/templates/react-javascript/src/index.jsx +++ b/templates/react-javascript/src/index.jsx @@ -6,17 +6,7 @@ import { Application } from "@nmfs-radfish/radfish"; const root = ReactDOM.createRoot(document.getElementById("root")); -const app = new Application({ - serviceWorker: { - url: - import.meta.env.MODE === "development" - ? "/mockServiceWorker.js" - : "/service-worker.js", - }, - mocks: { - handlers: import("../mocks/browser.js"), - }, -}); +const app = new Application(); app.on("ready", async () => { root.render( diff --git a/templates/react-javascript/src/service-worker.js b/templates/react-javascript/src/service-worker.js index 533221c1..771cafd4 100644 --- a/templates/react-javascript/src/service-worker.js +++ b/templates/react-javascript/src/service-worker.js @@ -10,12 +10,17 @@ import { clientsClaim } from "workbox-core"; import { ExpirationPlugin } from "workbox-expiration"; -import { precacheAndRoute, createHandlerBoundToURL } from "workbox-precaching"; +import { + cleanupOutdatedCaches, + precacheAndRoute, + createHandlerBoundToURL, +} from "workbox-precaching"; import { registerRoute } from "workbox-routing"; import { StaleWhileRevalidate } from "workbox-strategies"; clientsClaim(); +cleanupOutdatedCaches(); // Precache all of the assets generated by your build process. // Their URLs are injected into the manifest variable below. 
// This variable must be present somewhere in your service worker file, diff --git a/templates/react-javascript/vite.config.js b/templates/react-javascript/vite.config.js index bd155c00..e5d836a4 100644 --- a/templates/react-javascript/vite.config.js +++ b/templates/react-javascript/vite.config.js @@ -20,6 +20,11 @@ export default defineConfig((env) => ({ short_name: "RADFish", name: "RADFish React Boilerplate", icons: [ + { + src: "icons/radfish.ico", + sizes: "512x512 256x256 144x144 64x64 32x32 24x24 16x16", + type: "image/x-icon", + }, { src: "icons/radfish-144.ico", sizes: "144x144 64x64 32x32 24x24 16x16",