-
Get Started
+
+ By continuing you agree to our
+ Terms & Conditions
+
+ and
+ Privacy Policy.
+
+
+ Get Started
+
+
+ Already have a pre-verification code?
+
-
- Welcome to Web 3 Data Spaces
- Your eName is more than a nameโit's your unique digital passport. One constant identifier that travels with you across the internet, connecting your real-world self to the digital universe.
-
- Next
-
-
\ No newline at end of file
+

+ {#if error}
+
+ {error}
+
+ {/if}
+ {#if loading}
+
+
+
+
Generating your eName
+
+
+ {:else if preVerified}
+ {#if verificationSuccess}
+
Verification Successful!
+
Enter Demo Name for your ePassport
+
+
+ Continue
+
+ {:else}
+
+ Welcome to Web 3.0 Data Spaces
+
+
Enter Verification Code
+
+
+ Next
+
+ {/if}
+ {:else}
+
+ Your Digital Self begins with the Real You
+
+
+ In the Web 3.0 Data Space, identity is linked to reality. We begin
+ by verifying your real-world passport, which serves as the
+ foundation for issuing your secure ePassport. At the same time, we
+ generate your eName — a unique digital identifier — and create your
+ eVault to store and protect your personal data.
+
+
+ Next
+
+ {/if}
+
diff --git a/infrastructure/eid-wallet/src/routes/(auth)/register/+page.svelte b/infrastructure/eid-wallet/src/routes/(auth)/register/+page.svelte
index 49415099..a3aaf3f1 100644
--- a/infrastructure/eid-wallet/src/routes/(auth)/register/+page.svelte
+++ b/infrastructure/eid-wallet/src/routes/(auth)/register/+page.svelte
@@ -1,150 +1,205 @@
{#if firstStep}
-
+
-
-
- Your PIN does not match, try again.
+
+ {#snippet subtitle()}
+ Enter your 4-digit PIN code
+ {/snippet}
+
+
+
+
+ Your PIN does not match, try again.
+
- Confirm
+
+ Confirm
+
{:else}
-
+
-
-
+
+ {#snippet subtitle()}
+ Confirm by entering PIN again
+ {/snippet}
+
+
- Confirm
+ Confirm
{/if}
-
{#if !isBiometricScreen}
-
+
-
Pin code set!
-
Your PIN has been created. Youโll use it to access your digital entity securely.
+
PIN has been set!
+
+ Your PIN has been created. You’ll use it to access your digital
+ entity securely.
+
Next
{:else}
-
+
{isBiometricsAdded ? "Youโre all set!" : "Add biometrics"}
-
{ isBiometricsAdded ? "Your biometrics have been successfully added." : "Use your fingerprint or face recognition for faster, more secure logins."}
+
+ {isBiometricsAdded
+ ? "Your biometrics have been successfully added."
+ : "Use your fingerprint or face recognition for faster, more secure logins."}
+
{#if !isBiometricsAdded}
-
Skip
+
Skip
-
Set up
-
Biometrics unavailable.
+
Set up
+
+ Biometrics unavailable.
+
{:else}
-
Continue
+
Continue
{/if}
{/if}
diff --git a/infrastructure/eid-wallet/src/routes/(auth)/review/+page.svelte b/infrastructure/eid-wallet/src/routes/(auth)/review/+page.svelte
index 0221db0d..c44c7ed1 100644
--- a/infrastructure/eid-wallet/src/routes/(auth)/review/+page.svelte
+++ b/infrastructure/eid-wallet/src/routes/(auth)/review/+page.svelte
@@ -1,35 +1,35 @@
-
+
+ {#snippet subtitle()}
+ This identifier is permanently yours, and it stays with you for
+ your whole life.
+ {/snippet}
+
Next
-
diff --git a/infrastructure/eid-wallet/src/routes/(auth)/verify/+page.svelte b/infrastructure/eid-wallet/src/routes/(auth)/verify/+page.svelte
index 7327f714..c3e07115 100644
--- a/infrastructure/eid-wallet/src/routes/(auth)/verify/+page.svelte
+++ b/infrastructure/eid-wallet/src/routes/(auth)/verify/+page.svelte
@@ -1,171 +1,185 @@
-
+
+ {#snippet subtitle()}
+ Get your passport ready. You’ll be directed to present your
+ passport and take a quick selfie.
+ {/snippet}
+
I'm ready
- {#if $verifStep === 0}
-
- {:else if $verifStep === 1}
-
- {:else if loading}
-
-
-
-
Generating your eName
-
-
- {:else}
-
- {#if $status === "approved"}
-
-
Your verification was a success
-
You can now continue on to create your eName
+
+
+ {#if $status !== "declined"}
+ {$status === "approved"
+ ? "Continue"
+ : "Retry"}
+ {/if}
+
+ {/if}
+
diff --git a/infrastructure/eid-wallet/src/routes/(auth)/verify/steps/passport.svelte b/infrastructure/eid-wallet/src/routes/(auth)/verify/steps/passport.svelte
index c31d933f..e62f6ab4 100644
--- a/infrastructure/eid-wallet/src/routes/(auth)/verify/steps/passport.svelte
+++ b/infrastructure/eid-wallet/src/routes/(auth)/verify/steps/passport.svelte
@@ -1,85 +1,138 @@
diff --git a/infrastructure/eid-wallet/src/routes/(auth)/verify/steps/selfie.svelte b/infrastructure/eid-wallet/src/routes/(auth)/verify/steps/selfie.svelte
index 4b6b765d..7dab1f2e 100644
--- a/infrastructure/eid-wallet/src/routes/(auth)/verify/steps/selfie.svelte
+++ b/infrastructure/eid-wallet/src/routes/(auth)/verify/steps/selfie.svelte
@@ -1,22 +1,27 @@
diff --git a/infrastructure/eid-wallet/src/routes/+layout.svelte b/infrastructure/eid-wallet/src/routes/+layout.svelte
index 0806cb78..be5976d8 100644
--- a/infrastructure/eid-wallet/src/routes/+layout.svelte
+++ b/infrastructure/eid-wallet/src/routes/+layout.svelte
@@ -1,108 +1,111 @@
{#if showSplashScreen}
{:else}
-
+
+
{@render children?.()}
{/if}
diff --git a/infrastructure/eid-wallet/src/routes/+page.svelte b/infrastructure/eid-wallet/src/routes/+page.svelte
index 0936edc9..e11d9efa 100644
--- a/infrastructure/eid-wallet/src/routes/+page.svelte
+++ b/infrastructure/eid-wallet/src/routes/+page.svelte
@@ -1,36 +1,36 @@
diff --git a/infrastructure/evault-core/package.json b/infrastructure/evault-core/package.json
index b4d470a6..85535a2a 100644
--- a/infrastructure/evault-core/package.json
+++ b/infrastructure/evault-core/package.json
@@ -1,43 +1,44 @@
{
- "name": "evault-core",
- "version": "0.1.0",
- "description": "",
- "main": "index.js",
- "scripts": {
- "test": "vitest --config vitest.config.ts",
- "build": "tsc",
- "dev": "node --watch --import tsx src/evault.ts",
- "start": "node ./dist/evault.js"
- },
- "packageManager": "pnpm@10.6.5",
- "keywords": [],
- "author": "",
- "license": "ISC",
- "devDependencies": {
- "@types/json-schema": "^7.0.15",
- "@types/node": "^22.13.10",
- "dotenv": "^16.5.0",
- "testcontainers": "^10.24.2",
- "tsx": "^4.19.3",
- "typescript": "^5.8.3",
- "uuid": "^11.1.0",
- "vitest": "^3.0.9"
- },
- "dependencies": {
- "@fastify/formbody": "^8.0.2",
- "@fastify/swagger": "^8.14.0",
- "@fastify/swagger-ui": "^3.0.0",
- "@testcontainers/neo4j": "^10.24.2",
- "axios": "^1.6.7",
- "fastify": "^4.26.2",
- "graphql": "^16.10.0",
- "graphql-type-json": "^0.3.2",
- "graphql-voyager": "^2.1.0",
- "graphql-yoga": "^5.13.4",
- "json-schema": "^0.4.0",
- "multiformats": "^13.3.2",
- "neo4j-driver": "^5.28.1",
- "tweetnacl": "^1.0.3",
- "w3id": "workspace:*"
- }
+ "name": "evault-core",
+ "version": "0.1.0",
+ "description": "",
+ "main": "index.js",
+ "scripts": {
+ "test": "vitest --config vitest.config.ts",
+ "build": "tsc",
+ "dev": "node --watch --import tsx src/evault.ts",
+ "start": "node ./dist/evault.js"
+ },
+ "packageManager": "pnpm@10.6.5",
+ "keywords": [],
+ "author": "",
+ "license": "ISC",
+ "devDependencies": {
+ "@types/json-schema": "^7.0.15",
+ "@types/node": "^22.13.10",
+ "dotenv": "^16.5.0",
+ "testcontainers": "^10.24.2",
+ "tsx": "^4.19.3",
+ "typescript": "^5.8.3",
+ "uuid": "^11.1.0",
+ "vitest": "^3.0.9"
+ },
+ "dependencies": {
+ "@fastify/formbody": "^8.0.2",
+ "@fastify/swagger": "^8.14.0",
+ "@fastify/swagger-ui": "^3.0.0",
+ "@testcontainers/neo4j": "^10.24.2",
+ "axios": "^1.6.7",
+ "fastify": "^4.26.2",
+ "graphql": "^16.10.0",
+ "graphql-type-json": "^0.3.2",
+ "graphql-voyager": "^2.1.0",
+ "graphql-yoga": "^5.13.4",
+ "jose": "^5.2.2",
+ "json-schema": "^0.4.0",
+ "multiformats": "^13.3.2",
+ "neo4j-driver": "^5.28.1",
+ "tweetnacl": "^1.0.3",
+ "w3id": "workspace:*"
+ }
}
diff --git a/infrastructure/evault-core/src/db/db.service.spec.ts b/infrastructure/evault-core/src/db/db.service.spec.ts
index 4ae8109e..ab09324b 100644
--- a/infrastructure/evault-core/src/db/db.service.spec.ts
+++ b/infrastructure/evault-core/src/db/db.service.spec.ts
@@ -52,6 +52,7 @@ describe("DbService (integration)", () => {
const fetched = await service.findMetaEnvelopeById(id);
expect(fetched).toBeDefined();
+ if (!fetched) return;
expect(fetched.id).toBeDefined();
expect(fetched.ontology).toBe("TestTypes");
expect(fetched.acl).toEqual(["@test-user"]);
@@ -167,20 +168,25 @@ describe("DbService (integration)", () => {
const result = await service.findMetaEnvelopeById(
stored.metaEnvelope.id,
);
+ if (!result) return;
const targetEnvelope = result.envelopes.find(
(e: Envelope) => e.ontology === "value",
);
// Update with a different type
const newValue = new Date("2025-04-10T00:00:00Z");
+ if (!targetEnvelope) return;
await service.updateEnvelopeValue(targetEnvelope.id, newValue);
const updated = await service.findMetaEnvelopeById(
stored.metaEnvelope.id,
);
+ if (!updated) return;
const updatedValue = updated.envelopes.find(
(e: Envelope) => e.id === targetEnvelope.id,
);
+
+ if (!updatedValue) return;
expect(updatedValue.value).toBeInstanceOf(Date);
expect(updatedValue.value.toISOString()).toBe(
"2025-04-10T00:00:00.000Z",
diff --git a/infrastructure/evault-core/src/db/db.service.ts b/infrastructure/evault-core/src/db/db.service.ts
index a6efadb9..dd51d71f 100644
--- a/infrastructure/evault-core/src/db/db.service.ts
+++ b/infrastructure/evault-core/src/db/db.service.ts
@@ -366,6 +366,144 @@ export class DbService {
);
}
+ /**
+ * Updates a meta-envelope and its associated envelopes.
+ * @param id - The ID of the meta-envelope to update
+ * @param meta - The updated meta-envelope data
+ * @param acl - The updated access control list
+ * @returns The updated meta-envelope and its envelopes
+ */
+ async updateMetaEnvelopeById<
+ T extends Record = Record,
+ >(
+ id: string,
+ meta: Omit, "id">,
+ acl: string[],
+ ): Promise> {
+ try {
+ // First, get the existing meta-envelope to find existing envelopes
+ const existing = await this.findMetaEnvelopeById(id);
+ if (!existing) {
+ throw new Error(`Meta-envelope with id ${id} not found`);
+ }
+
+ // Update the meta-envelope properties
+ await this.runQuery(
+ `
+ MATCH (m:MetaEnvelope { id: $id })
+ SET m.ontology = $ontology, m.acl = $acl
+ `,
+ { id, ontology: meta.ontology, acl }
+ );
+
+ const createdEnvelopes: Envelope[] = [];
+ let counter = 0;
+
+ // For each field in the new payload
+ for (const [key, value] of Object.entries(meta.payload)) {
+ try {
+ const { value: storedValue, type: valueType } = serializeValue(value);
+ const alias = `e${counter}`;
+
+ // Check if an envelope with this ontology already exists
+ const existingEnvelope = existing.envelopes.find(e => e.ontology === key);
+
+ if (existingEnvelope) {
+ // Update existing envelope
+ await this.runQuery(
+ `
+ MATCH (e:Envelope { id: $envelopeId })
+ SET e.value = $newValue, e.valueType = $valueType
+ `,
+ {
+ envelopeId: existingEnvelope.id,
+ newValue: storedValue,
+ valueType,
+ }
+ );
+
+ createdEnvelopes.push({
+ id: existingEnvelope.id,
+ ontology: key,
+ value: value as T[keyof T],
+ valueType,
+ });
+ } else {
+ // Create new envelope
+ const envW3id = await new W3IDBuilder().build();
+ const envelopeId = envW3id.id;
+
+ await this.runQuery(
+ `
+ MATCH (m:MetaEnvelope { id: $metaId })
+ CREATE (${alias}:Envelope {
+ id: $${alias}_id,
+ ontology: $${alias}_ontology,
+ value: $${alias}_value,
+ valueType: $${alias}_type
+ })
+ WITH m, ${alias}
+ MERGE (m)-[:LINKS_TO]->(${alias})
+ `,
+ {
+ metaId: id,
+ [`${alias}_id`]: envelopeId,
+ [`${alias}_ontology`]: key,
+ [`${alias}_value`]: storedValue,
+ [`${alias}_type`]: valueType,
+ }
+ );
+
+ createdEnvelopes.push({
+ id: envelopeId,
+ ontology: key,
+ value: value as T[keyof T],
+ valueType,
+ });
+ }
+
+ counter++;
+ } catch (error) {
+ console.error(`Error processing field ${key}:`, error);
+ throw error;
+ }
+ }
+
+ // Delete envelopes that are no longer in the payload
+ const existingOntologies = new Set(Object.keys(meta.payload));
+ const envelopesToDelete = existing.envelopes.filter(
+ e => !existingOntologies.has(e.ontology)
+ );
+
+ for (const envelope of envelopesToDelete) {
+ try {
+ await this.runQuery(
+ `
+ MATCH (e:Envelope { id: $envelopeId })
+ DETACH DELETE e
+ `,
+ { envelopeId: envelope.id }
+ );
+ } catch (error) {
+ console.error(`Error deleting envelope ${envelope.id}:`, error);
+ throw error;
+ }
+ }
+
+ return {
+ metaEnvelope: {
+ id,
+ ontology: meta.ontology,
+ acl,
+ },
+ envelopes: createdEnvelopes,
+ };
+ } catch (error) {
+ console.error('Error in updateMetaEnvelopeById:', error);
+ throw error;
+ }
+ }
+
/**
* Retrieves all envelopes in the system.
* @returns Array of all envelopes
diff --git a/infrastructure/evault-core/src/db/retry-neo4j.ts b/infrastructure/evault-core/src/db/retry-neo4j.ts
new file mode 100644
index 00000000..d08f5178
--- /dev/null
+++ b/infrastructure/evault-core/src/db/retry-neo4j.ts
@@ -0,0 +1,40 @@
+import neo4j, { Driver } from "neo4j-driver";
+
+/**
+ * Attempts to connect to Neo4j with retry logic.
+ * @param uri - Neo4j URI
+ * @param user - Username
+ * @param password - Password
+ * @param maxRetries - Maximum number of retries (default: 10)
+ * @param delayMs - Delay between retries in ms (default: 3000)
+ * @returns Connected Neo4j Driver
+ * @throws Error if connection fails after all retries
+ */
+export async function connectWithRetry(
+ uri: string,
+ user: string,
+ password: string,
+ maxRetries = 30,
+ delayMs = 5000,
+): Promise {
+ let attempt = 0;
+ while (attempt < maxRetries) {
+ try {
+ const driver = neo4j.driver(
+ uri,
+ neo4j.auth.basic(user, password),
+ { encrypted: "ENCRYPTION_OFF" }, // or { encrypted: false }
+ );
+ await driver.getServerInfo();
+ console.log("Connected to Neo4j!");
+ return driver;
+ } catch (err: any) {
+ attempt++;
+ console.warn(
+ `Neo4j connection attempt ${attempt} failed: ${err.message}. Retrying in ${delayMs}ms...`,
+ );
+ await new Promise((res) => setTimeout(res, delayMs));
+ }
+ }
+ throw new Error("Could not connect to Neo4j after multiple attempts");
+}
diff --git a/infrastructure/evault-core/src/db/types.ts b/infrastructure/evault-core/src/db/types.ts
index 1b79fd52..b76f9808 100644
--- a/infrastructure/evault-core/src/db/types.ts
+++ b/infrastructure/evault-core/src/db/types.ts
@@ -2,20 +2,20 @@
* Represents a meta-envelope that contains multiple envelopes of data.
*/
export type MetaEnvelope = Record> =
- {
- ontology: string;
- payload: T;
- acl: string[];
- };
+ {
+ ontology: string;
+ payload: T;
+ acl: string[];
+ };
/**
* Represents an individual envelope containing a single piece of data.
*/
export type Envelope = {
- id: string;
- value: T;
- ontology: string;
- valueType: string;
+ id: string;
+ value: T;
+ ontology: string;
+ valueType: string;
};
/**
@@ -23,34 +23,34 @@ export type Envelope = {
* Includes the parsed payload structure reconstructed from the envelopes.
*/
export type MetaEnvelopeResult<
- T extends Record = Record,
+ T extends Record = Record
> = {
- id: string;
- ontology: string;
- acl: string[];
- envelopes: Envelope[];
- parsed: T;
+ id: string;
+ ontology: string;
+ acl: string[];
+ envelopes: Envelope[];
+ parsed: T;
};
/**
* Result type for storing a new meta-envelope.
*/
export type StoreMetaEnvelopeResult<
- T extends Record = Record,
+ T extends Record = Record
> = {
- metaEnvelope: {
- id: string;
- ontology: string;
- acl: string[];
- };
- envelopes: Envelope[];
+ metaEnvelope: {
+ id: string;
+ ontology: string;
+ acl: string[];
+ };
+ envelopes: Envelope[];
};
/**
* Result type for searching meta-envelopes.
*/
export type SearchMetaEnvelopesResult<
- T extends Record = Record,
+ T extends Record = Record
> = MetaEnvelopeResult[];
/**
diff --git a/infrastructure/evault-core/src/evault.ts b/infrastructure/evault-core/src/evault.ts
index 57b220ad..d30864df 100644
--- a/infrastructure/evault-core/src/evault.ts
+++ b/infrastructure/evault-core/src/evault.ts
@@ -13,6 +13,7 @@ import dotenv from "dotenv";
import path from "path";
import neo4j, { Driver } from "neo4j-driver";
import { W3ID } from "./w3id/w3id";
+import { connectWithRetry } from "./db/retry-neo4j";
dotenv.config({ path: path.resolve(__dirname, "../../../.env") });
@@ -22,7 +23,17 @@ class EVault {
logService: LogService;
driver: Driver;
- constructor() {
+ private constructor(driver: Driver) {
+ this.driver = driver;
+ const dbService = new DbService(driver);
+ this.logService = new LogService(driver);
+ this.graphqlServer = new GraphQLServer(dbService);
+ this.server = fastify({
+ logger: true,
+ });
+ }
+
+ static async create(): Promise {
const uri = process.env.NEO4J_URI || "bolt://localhost:7687";
const user = process.env.NEO4J_USER || "neo4j";
const password = process.env.NEO4J_PASSWORD || "neo4j";
@@ -37,15 +48,8 @@ class EVault {
);
}
- this.driver = neo4j.driver(uri, neo4j.auth.basic(user, password));
-
- const dbService = new DbService(this.driver);
- this.logService = new LogService(this.driver);
- this.graphqlServer = new GraphQLServer(dbService);
-
- this.server = fastify({
- logger: true,
- });
+ const driver = await connectWithRetry(uri, user, password);
+ return new EVault(driver);
}
async initialize() {
@@ -64,10 +68,7 @@ class EVault {
url: yoga.graphqlEndpoint,
method: ["GET", "POST", "OPTIONS"],
handler: (req, reply) =>
- yoga.handleNodeRequestAndResponse(req, reply, {
- req,
- reply,
- }),
+ yoga.handleNodeRequestAndResponse(req, reply),
});
// Mount Voyager endpoint
@@ -102,5 +103,6 @@ class EVault {
}
}
-const evault = new EVault();
-evault.start().catch(console.error);
+EVault.create()
+ .then(evault => evault.start())
+ .catch(console.error);
diff --git a/infrastructure/evault-core/src/protocol/examples/examples.ts b/infrastructure/evault-core/src/protocol/examples/examples.ts
index 455f16b4..f36eb194 100644
--- a/infrastructure/evault-core/src/protocol/examples/examples.ts
+++ b/infrastructure/evault-core/src/protocol/examples/examples.ts
@@ -79,7 +79,7 @@ export const exampleQueries = `
# }
################################################################################
-# โ๏ธ 5. Update a Single Envelopeโs Value
+# โ๏ธ 5. Update a Single Envelope's Value
################################################################################
# mutation {
@@ -98,7 +98,39 @@ export const exampleQueries = `
# }
################################################################################
-# ๐ฆ 7. List All Envelopes in the System
+# ๐ 7. Update a MetaEnvelope by ID
+################################################################################
+
+# mutation {
+# updateMetaEnvelopeById(
+# id: "YOUR_META_ENVELOPE_ID_HERE",
+# input: {
+# ontology: "SocialMediaPost",
+# payload: {
+# text: "Updated post content",
+# image: "https://example.com/new-pic.jpg",
+# dateCreated: "2025-04-10T10:00:00Z",
+# userLikes: ["@user1", "@user2", "@user3"]
+# },
+# acl: ["@d1fa5cb1-6178-534b-a096-59794d485f65"]
+# }
+# ) {
+# metaEnvelope {
+# id
+# ontology
+# parsed
+# }
+# envelopes {
+# id
+# ontology
+# value
+# valueType
+# }
+# }
+# }
+
+################################################################################
+# ๐ฆ 8. List All Envelopes in the System
################################################################################
# query {
diff --git a/infrastructure/evault-core/src/protocol/graphql-server.ts b/infrastructure/evault-core/src/protocol/graphql-server.ts
index bde5adde..44ae7d88 100644
--- a/infrastructure/evault-core/src/protocol/graphql-server.ts
+++ b/infrastructure/evault-core/src/protocol/graphql-server.ts
@@ -7,121 +7,291 @@ import { DbService } from "../db/db.service";
import { VaultAccessGuard, VaultContext } from "./vault-access-guard";
import { GraphQLSchema } from "graphql";
import { exampleQueries } from "./examples/examples";
+import axios from "axios";
export class GraphQLServer {
- private db: DbService;
- private accessGuard: VaultAccessGuard;
- private schema: GraphQLSchema = createSchema({
- typeDefs,
- resolvers: {},
- });
- server?: Server;
-
- constructor(db: DbService) {
- this.db = db;
- this.accessGuard = new VaultAccessGuard(db);
- }
-
- public getSchema(): GraphQLSchema {
- return this.schema;
- }
-
- init() {
- const resolvers = {
- JSON: require("graphql-type-json"),
-
- Query: {
- getMetaEnvelopeById: this.accessGuard.middleware(
- (_: any, { id }: { id: string }) => {
- return this.db.findMetaEnvelopeById(id);
- }
- ),
- findMetaEnvelopesByOntology: this.accessGuard.middleware(
- (_: any, { ontology }: { ontology: string }) => {
- return this.db.findMetaEnvelopesByOntology(ontology);
- }
- ),
- searchMetaEnvelopes: this.accessGuard.middleware(
- (_: any, { ontology, term }: { ontology: string; term: string }) => {
- return this.db.findMetaEnvelopesBySearchTerm(ontology, term);
- }
- ),
- getAllEnvelopes: this.accessGuard.middleware(() => {
- return this.db.getAllEnvelopes();
- }),
- },
-
- Mutation: {
- storeMetaEnvelope: this.accessGuard.middleware(
- async (
- _: any,
- {
- input,
- }: {
- input: {
- ontology: string;
- payload: any;
- acl: string[];
- };
+ private db: DbService;
+ private accessGuard: VaultAccessGuard;
+ private schema: GraphQLSchema = createSchema({
+ typeDefs,
+ resolvers: {},
+ });
+ server?: Server;
+ constructor(db: DbService) {
+ this.db = db;
+ this.accessGuard = new VaultAccessGuard(db);
+ }
+
+ public getSchema(): GraphQLSchema {
+ return this.schema;
+ }
+
+ /**
+ * Fetches the list of active platforms from the registry
+ * @returns Promise - Array of platform URLs
+ */
+ private async getActivePlatforms(): Promise {
+ try {
+ if (!process.env.REGISTRY_URL) {
+ console.error("REGISTRY_URL is not set");
+ return [];
}
- ) => {
- const result = await this.db.storeMetaEnvelope(
- {
- ontology: input.ontology,
- payload: input.payload,
- acl: input.acl,
- },
- input.acl
+
+ const response = await axios.get(
+ new URL("/platforms", process.env.REGISTRY_URL).toString()
);
- return result;
- }
- ),
- deleteMetaEnvelope: this.accessGuard.middleware(
- async (_: any, { id }: { id: string }) => {
- await this.db.deleteMetaEnvelope(id);
- return true;
- }
- ),
- updateEnvelopeValue: this.accessGuard.middleware(
- async (
- _: any,
- { envelopeId, newValue }: { envelopeId: string; newValue: any }
- ) => {
- await this.db.updateEnvelopeValue(envelopeId, newValue);
- return true;
- }
- ),
- },
- };
-
- this.schema = createSchema({
- typeDefs,
- resolvers,
- });
+ return response.data;
+ } catch (error) {
+ console.error("Failed to fetch active platforms:", error);
+ return [];
+ }
+ }
+
+ /**
+ * Delivers webhooks to all platforms except the requesting one
+ * @param requestingPlatform - The platform that made the request (if any)
+ * @param webhookPayload - The payload to send to webhooks
+ */
+ private async deliverWebhooks(
+ requestingPlatform: string | null,
+ webhookPayload: any
+ ): Promise {
+ try {
+ const activePlatforms = await this.getActivePlatforms();
- const yoga = createYoga({
- schema: this.schema,
- graphqlEndpoint: "/graphql",
- graphiql: {
- defaultQuery: exampleQueries,
- },
- context: async ({ request }) => {
- const authHeader = request.headers.get("authorization") ?? "";
- const token = authHeader.replace("Bearer ", "");
-
- if (token) {
- const id = getJWTHeader(token).kid?.split("#")[0];
- return {
- currentUser: id ?? null,
- };
+ // Filter out the requesting platform
+ const platformsToNotify = activePlatforms.filter((platformUrl) => {
+ if (!requestingPlatform) return true;
+
+ // Normalize URLs for comparison
+ const normalizedPlatformUrl = new URL(platformUrl).toString();
+ const normalizedRequestingPlatform = new URL(
+ requestingPlatform
+ ).toString();
+
+ return normalizedPlatformUrl !== normalizedRequestingPlatform;
+ });
+ console.log("sending webhooks to ", platformsToNotify);
+
+ // Send webhooks to all other platforms
+ const webhookPromises = platformsToNotify.map(
+ async (platformUrl) => {
+ try {
+ const webhookUrl = new URL(
+ "/api/webhook",
+ platformUrl
+ ).toString();
+ await axios.post(webhookUrl, webhookPayload, {
+ headers: {
+ "Content-Type": "application/json",
+ },
+ timeout: 5000, // 5 second timeout
+ });
+ console.log(
+ `Webhook delivered successfully to ${platformUrl}`
+ );
+ } catch (error) {
+ console.error(
+ `Failed to deliver webhook to ${platformUrl}:`,
+ error
+ );
+ }
+ }
+ );
+
+ await Promise.allSettled(webhookPromises);
+ } catch (error) {
+ console.error("Error in webhook delivery:", error);
}
+ }
+
+ init() {
+ const resolvers = {
+ JSON: require("graphql-type-json"),
- return {
- currentUser: null,
+ Query: {
+ getMetaEnvelopeById: this.accessGuard.middleware(
+ (_: any, { id }: { id: string }) => {
+ return this.db.findMetaEnvelopeById(id);
+ }
+ ),
+ findMetaEnvelopesByOntology: this.accessGuard.middleware(
+ (_: any, { ontology }: { ontology: string }) => {
+ return this.db.findMetaEnvelopesByOntology(ontology);
+ }
+ ),
+ searchMetaEnvelopes: this.accessGuard.middleware(
+ (
+ _: any,
+ { ontology, term }: { ontology: string; term: string }
+ ) => {
+ return this.db.findMetaEnvelopesBySearchTerm(
+ ontology,
+ term
+ );
+ }
+ ),
+ getAllEnvelopes: this.accessGuard.middleware(() => {
+ return this.db.getAllEnvelopes();
+ }),
+ },
+
+ Mutation: {
+ storeMetaEnvelope: this.accessGuard.middleware(
+ async (
+ _: any,
+ {
+ input,
+ }: {
+ input: {
+ ontology: string;
+ payload: any;
+ acl: string[];
+ };
+ },
+ context: VaultContext
+ ) => {
+ const result = await this.db.storeMetaEnvelope(
+ {
+ ontology: input.ontology,
+ payload: input.payload,
+ acl: input.acl,
+ },
+ input.acl
+ );
+
+ // Deliver webhooks for create operation
+ const requestingPlatform =
+ context.tokenPayload?.platform || null;
+ const webhookPayload = {
+ id: result.metaEnvelope.id,
+ w3id: `@${process.env.W3ID}`,
+ data: input.payload,
+ schemaId: input.ontology,
+ };
+
+ /**
+ * To whoever who reads this in the future please don't
+ * remove this delay as this prevents a VERY horrible
+ * disgusting edge case, where if a platform's URL is
+ * not determinable the webhook to the same platform as
+ * the one who sent off the request gets sent and that
+ * is not an ideal case trust me I've suffered, it
+ * causes an absolutely beautiful error where you get
+ * stuck in what I like to call webhook ping-pong
+ */
+ setTimeout(() => {
+ this.deliverWebhooks(
+ requestingPlatform,
+ webhookPayload
+ );
+ }, 3_000);
+
+ return result;
+ }
+ ),
+ updateMetaEnvelopeById: this.accessGuard.middleware(
+ async (
+ _: any,
+ {
+ id,
+ input,
+ }: {
+ id: string;
+ input: {
+ ontology: string;
+ payload: any;
+ acl: string[];
+ };
+ },
+ context: VaultContext
+ ) => {
+ try {
+ const result = await this.db.updateMetaEnvelopeById(
+ id,
+ {
+ ontology: input.ontology,
+ payload: input.payload,
+ acl: input.acl,
+ },
+ input.acl
+ );
+
+ // Deliver webhooks for update operation
+ const requestingPlatform =
+ context.tokenPayload?.platform || null;
+ const webhookPayload = {
+ id: id,
+ w3id: `@${process.env.W3ID}`,
+ data: input.payload,
+ schemaId: input.ontology,
+ };
+
+ // Fire and forget webhook delivery
+ this.deliverWebhooks(
+ requestingPlatform,
+ webhookPayload
+ );
+
+ return result;
+ } catch (error) {
+ console.error(
+ "Error in updateMetaEnvelopeById:",
+ error
+ );
+ throw error;
+ }
+ }
+ ),
+ deleteMetaEnvelope: this.accessGuard.middleware(
+ async (_: any, { id }: { id: string }) => {
+ await this.db.deleteMetaEnvelope(id);
+ return true;
+ }
+ ),
+ updateEnvelopeValue: this.accessGuard.middleware(
+ async (
+ _: any,
+ {
+ envelopeId,
+ newValue,
+ }: { envelopeId: string; newValue: any }
+ ) => {
+ await this.db.updateEnvelopeValue(envelopeId, newValue);
+ return true;
+ }
+ ),
+ },
};
- },
- });
- return yoga;
- }
+ this.schema = createSchema({
+ typeDefs,
+ resolvers,
+ });
+
+ const yoga = createYoga({
+ schema: this.schema,
+ graphqlEndpoint: "/graphql",
+ graphiql: {
+ defaultQuery: exampleQueries,
+ },
+ context: async ({ request }) => {
+ const authHeader = request.headers.get("authorization") ?? "";
+ const token = authHeader.replace("Bearer ", "");
+
+ if (token) {
+ const id = getJWTHeader(token).kid?.split("#")[0];
+ return {
+ currentUser: id ?? null,
+ };
+ }
+
+ return {
+ currentUser: null,
+ };
+ },
+ });
+
+ return yoga;
+ }
}
diff --git a/infrastructure/evault-core/src/protocol/typedefs.ts b/infrastructure/evault-core/src/protocol/typedefs.ts
index fdd0b7cf..a4b406b8 100644
--- a/infrastructure/evault-core/src/protocol/typedefs.ts
+++ b/infrastructure/evault-core/src/protocol/typedefs.ts
@@ -38,5 +38,6 @@ export const typeDefs = /* GraphQL */ `
storeMetaEnvelope(input: MetaEnvelopeInput!): StoreMetaEnvelopeResult!
deleteMetaEnvelope(id: String!): Boolean!
updateEnvelopeValue(envelopeId: String!, newValue: JSON!): Boolean!
+ updateMetaEnvelopeById(id: String!, input: MetaEnvelopeInput!): StoreMetaEnvelopeResult!
}
`;
diff --git a/infrastructure/evault-core/src/protocol/vault-access-guard.ts b/infrastructure/evault-core/src/protocol/vault-access-guard.ts
index 867237c2..ede3f2c8 100644
--- a/infrastructure/evault-core/src/protocol/vault-access-guard.ts
+++ b/infrastructure/evault-core/src/protocol/vault-access-guard.ts
@@ -1,114 +1,169 @@
import { YogaInitialContext } from "graphql-yoga";
import { DbService } from "../db/db.service";
import { MetaEnvelope } from "../db/types";
+import * as jose from "jose";
+import axios from "axios";
export type VaultContext = YogaInitialContext & {
- currentUser: string | null;
+ currentUser: string | null;
+ tokenPayload?: any;
};
export class VaultAccessGuard {
- constructor(private db: DbService) {}
-
- /**
- * Checks if the current user has access to a meta envelope based on its ACL
- * @param metaEnvelopeId - The ID of the meta envelope to check access for
- * @param context - The GraphQL context containing the current user
- * @returns Promise - Whether the user has access
- */
- private async checkAccess(
- metaEnvelopeId: string,
- context: VaultContext
- ): Promise {
- if (!context.currentUser) {
- const metaEnvelope = await this.db.findMetaEnvelopeById(metaEnvelopeId);
- if (metaEnvelope && metaEnvelope.acl.includes("*")) return true;
- return false;
- }
+ constructor(private db: DbService) {}
+
+ /**
+ * Validates JWT token from Authorization header
+ * @param authHeader - The Authorization header value
+ * @returns Promise - The validated token payload
+ */
+ private async validateToken(
+ authHeader: string | null
+ ): Promise {
+ if (!authHeader || !authHeader.startsWith("Bearer ")) {
+ return null;
+ }
+
+ const token = authHeader.substring(7); // Remove 'Bearer ' prefix
+
+ try {
+ if (!process.env.REGISTRY_URL) {
+ console.error("REGISTRY_URL is not set");
+ return null;
+ }
+
+ const jwksResponse = await axios.get(
+ new URL(
+ `/.well-known/jwks.json`,
+ process.env.REGISTRY_URL
+ ).toString()
+ );
+
+ const JWKS = jose.createLocalJWKSet(jwksResponse.data);
+ const { payload } = await jose.jwtVerify(token, JWKS);
- const metaEnvelope = await this.db.findMetaEnvelopeById(metaEnvelopeId);
- if (!metaEnvelope) {
- return false;
+ return payload;
+ } catch (error) {
+ console.error("Token validation failed:", error);
+ return null;
+ }
}
- // If ACL contains "*", anyone can access
- if (metaEnvelope.acl.includes("*")) {
- return true;
+ /**
+ * Checks if the current user has access to a meta envelope based on its ACL
+ * @param metaEnvelopeId - The ID of the meta envelope to check access for
+ * @param context - The GraphQL context containing the current user
+ * @returns Promise - Whether the user has access
+ */
+ private async checkAccess(
+ metaEnvelopeId: string,
+ context: VaultContext
+ ): Promise {
+ // Validate token if present
+ const authHeader =
+ context.request?.headers?.get("authorization") ??
+ context.request?.headers?.get("Authorization");
+ const tokenPayload = await this.validateToken(authHeader);
+
+ if (tokenPayload) {
+ // Token is valid, set platform context and allow access
+ context.tokenPayload = tokenPayload;
+ return true;
+ }
+
+ // Fallback to original ACL logic if no valid token
+ if (!context.currentUser) {
+ const metaEnvelope = await this.db.findMetaEnvelopeById(
+ metaEnvelopeId
+ );
+ if (metaEnvelope && metaEnvelope.acl.includes("*")) return true;
+ return false;
+ }
+
+ const metaEnvelope = await this.db.findMetaEnvelopeById(metaEnvelopeId);
+ if (!metaEnvelope) {
+ return false;
+ }
+
+ // If ACL contains "*", anyone can access
+ if (metaEnvelope.acl.includes("*")) {
+ return true;
+ }
+
+ // Check if the current user's ID is in the ACL
+ return metaEnvelope.acl.includes(context.currentUser);
}
- // Check if the current user's ID is in the ACL
- return metaEnvelope.acl.includes(context.currentUser);
- }
-
- /**
- * Filters out ACL from meta envelope responses
- * @param metaEnvelope - The meta envelope to filter
- * @returns The filtered meta envelope without ACL
- */
- private filterACL(metaEnvelope: any) {
- if (!metaEnvelope) return null;
- const { acl, ...filtered } = metaEnvelope;
- return filtered;
- }
-
- /**
- * Filters a list of meta envelopes to only include those the user has access to
- * @param envelopes - List of meta envelopes to filter
- * @param context - The GraphQL context containing the current user
- * @returns Promise - Filtered list of meta envelopes
- */
- private async filterEnvelopesByAccess(
- envelopes: MetaEnvelope[],
- context: VaultContext
- ): Promise {
- const filteredEnvelopes = [];
- for (const envelope of envelopes) {
- const hasAccess =
- envelope.acl.includes("*") ||
- envelope.acl.includes(context.currentUser ?? "");
- if (hasAccess) {
- filteredEnvelopes.push(this.filterACL(envelope));
- }
+ /**
+ * Filters out ACL from meta envelope responses
+ * @param metaEnvelope - The meta envelope to filter
+ * @returns The filtered meta envelope without ACL
+ */
+ private filterACL(metaEnvelope: any) {
+ if (!metaEnvelope) return null;
+ const { acl, ...filtered } = metaEnvelope;
+ return filtered;
}
- return filteredEnvelopes;
- }
-
- /**
- * Middleware function to check access before executing a resolver
- * @param resolver - The resolver function to wrap
- * @returns A wrapped resolver that checks access before executing
- */
- public middleware(
- resolver: (parent: T, args: Args, context: VaultContext) => Promise
- ) {
- return async (parent: T, args: Args, context: VaultContext) => {
- // For operations that don't require a specific meta envelope ID (bulk queries)
- if (!args.id && !args.envelopeId) {
- const result = await resolver(parent, args, context);
-
- // If the result is an array of meta envelopes, filter based on access
- if (Array.isArray(result)) {
- return this.filterEnvelopesByAccess(result, context);
+
+ /**
+ * Filters a list of meta envelopes to only include those the user has access to
+ * @param envelopes - List of meta envelopes to filter
+ * @param context - The GraphQL context containing the current user
+ * @returns Promise - Filtered list of meta envelopes
+ */
+ private async filterEnvelopesByAccess(
+ envelopes: MetaEnvelope[],
+ context: VaultContext
+ ): Promise {
+ const filteredEnvelopes = [];
+ for (const envelope of envelopes) {
+ const hasAccess =
+ envelope.acl.includes("*") ||
+ envelope.acl.includes(context.currentUser ?? "");
+ if (hasAccess) {
+ filteredEnvelopes.push(this.filterACL(envelope));
+ }
}
+ return filteredEnvelopes;
+ }
- // If the result is a single meta envelope, filter ACL
- return this.filterACL(result);
- }
-
- // For operations that target a specific meta envelope
- const metaEnvelopeId = args.id || args.envelopeId;
- if (!metaEnvelopeId) {
- const result = await resolver(parent, args, context);
- return this.filterACL(result);
- }
-
- const hasAccess = await this.checkAccess(metaEnvelopeId, context);
- if (!hasAccess) {
- throw new Error("Access denied");
- }
-
- // console.log
- const result = await resolver(parent, args, context);
- return this.filterACL(result);
- };
- }
+ /**
+ * Middleware function to check access before executing a resolver
+ * @param resolver - The resolver function to wrap
+ * @returns A wrapped resolver that checks access before executing
+ */
+ public middleware(
+ resolver: (parent: T, args: Args, context: VaultContext) => Promise
+ ) {
+ return async (parent: T, args: Args, context: VaultContext) => {
+ // For operations that don't require a specific meta envelope ID (bulk queries)
+ if (!args.id && !args.envelopeId) {
+ const result = await resolver(parent, args, context);
+
+ // If the result is an array of meta envelopes, filter based on access
+ if (Array.isArray(result)) {
+ return this.filterEnvelopesByAccess(result, context);
+ }
+
+ // If the result is a single meta envelope, filter ACL
+ return this.filterACL(result);
+ }
+
+ // For operations that target a specific meta envelope
+ const metaEnvelopeId = args.id || args.envelopeId;
+ if (!metaEnvelopeId) {
+ const result = await resolver(parent, args, context);
+ return this.filterACL(result);
+ }
+
+ const hasAccess = await this.checkAccess(metaEnvelopeId, context);
+ if (!hasAccess) {
+ throw new Error("Access denied");
+ }
+
+ // console.log
+ const result = await resolver(parent, args, context);
+ return this.filterACL(result);
+ };
+ }
}
diff --git a/infrastructure/evault-provisioner/src/controllers/VerificationController.ts b/infrastructure/evault-provisioner/src/controllers/VerificationController.ts
index c31adf0d..4f59fac7 100644
--- a/infrastructure/evault-provisioner/src/controllers/VerificationController.ts
+++ b/infrastructure/evault-provisioner/src/controllers/VerificationController.ts
@@ -40,7 +40,9 @@ export class VerificationController {
// Initial heartbeat to keep connection open
res.write(
- `event: connected\ndata: ${JSON.stringify({ hi: "hi" })}\n\n`,
+ `event: connected\ndata: ${JSON.stringify({
+ hi: "hi",
+ })}\n\n`,
);
const handler = (data: any) => {
@@ -220,9 +222,10 @@ export class VerificationController {
const verificationMatch =
await this.verificationService.findOne({
documentId:
- body.data.verification.document.number.value
+ body.data.verification.document.number
+ .value,
});
- console.log("matched", verificationMatch)
+ console.log("matched", verificationMatch);
if (verificationMatch) {
approved = false;
status = "declined";
@@ -230,6 +233,7 @@ export class VerificationController {
"Document already used to create an eVault";
}
}
+ console.log(body.data.verification.document);
await this.verificationService.findByIdAndUpdate(id, {
approved,
data: {
diff --git a/infrastructure/evault-provisioner/src/index.ts b/infrastructure/evault-provisioner/src/index.ts
index 33cf07b6..8a7c8235 100644
--- a/infrastructure/evault-provisioner/src/index.ts
+++ b/infrastructure/evault-provisioner/src/index.ts
@@ -24,7 +24,7 @@ app.use(
methods: ["GET", "POST", "OPTIONS", "PATCH"],
allowedHeaders: ["Content-Type", "Authorization"],
credentials: true,
- }),
+ })
);
// Increase JSON payload limit to 50MB
@@ -45,7 +45,7 @@ const initializeDatabase = async () => {
// Initialize services and controllers
const verificationService = new VerificationService(
- AppDataSource.getRepository("Verification"),
+ AppDataSource.getRepository("Verification")
);
const verificationController = new VerificationController(verificationService);
@@ -68,15 +68,16 @@ app.get("/health", (req: Request, res: Response) => {
res.json({ status: "ok" });
});
+export const DEMO_CODE_W3DS = "d66b7138-538a-465f-a6ce-f6985854c3f4";
+
// Provision evault endpoint
app.post(
"/provision",
async (
req: Request<{}, {}, ProvisionRequest>,
- res: Response,
+ res: Response
) => {
try {
- console.log("provisioning init");
if (!process.env.PUBLIC_REGISTRY_URL)
throw new Error("PUBLIC_REGISTRY_URL is not set");
const { registryEntropy, namespace, verificationId } = req.body;
@@ -88,27 +89,17 @@ app.post(
"Missing required fields: registryEntropy, namespace, verifficationId",
});
}
- const verification =
- await verificationService.findById(verificationId);
- if (!verification) throw new Error("verification doesn't exist");
- if (!verification.approved)
- throw new Error("verification not approved");
- if (verification.consumed)
- throw new Error("This verification ID has already been used");
-
- console.log("jwk");
+
const jwksResponse = await axios.get(
new URL(
`/.well-known/jwks.json`,
- process.env.PUBLIC_REGISTRY_URL,
- ).toString(),
+ process.env.PUBLIC_REGISTRY_URL
+ ).toString()
);
const JWKS = jose.createLocalJWKSet(jwksResponse.data);
-
const { payload } = await jose.jwtVerify(registryEntropy, JWKS);
- const evaultId = await new W3IDBuilder().withGlobal(true).build();
const userId = await new W3IDBuilder()
.withNamespace(namespace)
.withEntropy(payload.entropy as string)
@@ -117,12 +108,28 @@ app.post(
const w3id = userId.id;
- const uri = await provisionEVault(w3id, evaultId.id);
-
+ if (verificationId !== DEMO_CODE_W3DS) {
+ const verification = await verificationService.findById(
+ verificationId
+ );
+ if (!verification)
+ throw new Error("verification doesn't exist");
+ if (!verification.approved)
+ throw new Error("verification not approved");
+ if (verification.consumed)
+ throw new Error(
+ "This verification ID has already been used"
+ );
+ }
+ const evaultId = await new W3IDBuilder().withGlobal(true).build();
+ const uri = await provisionEVault(
+ w3id,
+ process.env.PUBLIC_REGISTRY_URL
+ );
await axios.post(
new URL(
"/register",
- process.env.PUBLIC_REGISTRY_URL,
+ process.env.PUBLIC_REGISTRY_URL
).toString(),
{
ename: w3id,
@@ -133,7 +140,7 @@ app.post(
headers: {
Authorization: `Bearer ${process.env.REGISTRY_SHARED_SECRET}`,
},
- },
+ }
);
res.json({
@@ -143,13 +150,14 @@ app.post(
});
} catch (error) {
const axiosError = error as AxiosError;
+ console.error(error);
res.status(500).json({
success: false,
error: axiosError.response?.data || axiosError.message,
message: "Failed to provision evault instance",
});
}
- },
+ }
);
// Register verification routes
diff --git a/infrastructure/evault-provisioner/src/templates/evault.nomad.ts b/infrastructure/evault-provisioner/src/templates/evault.nomad.ts
index 7a6ea18d..bc2aa115 100644
--- a/infrastructure/evault-provisioner/src/templates/evault.nomad.ts
+++ b/infrastructure/evault-provisioner/src/templates/evault.nomad.ts
@@ -36,7 +36,7 @@ export function generatePassword(length = 16): string {
*
* @throws {Error} If the service endpoint cannot be determined from the cluster.
*/
-export async function provisionEVault(w3id: string, eVaultId: string) {
+export async function provisionEVault(w3id: string, registryUrl: string) {
console.log("starting to provision");
const idParts = w3id.split("@");
w3id = idParts[idParts.length - 1];
@@ -115,6 +115,8 @@ export async function provisionEVault(w3id: string, eVaultId: string) {
{
name: "evault",
image: "merulauvo/evault:latest",
+ // image: "local-evault:latest",
+ // imagePullPolicy: "Never",
ports: [{ containerPort }],
env: [
{
@@ -122,6 +124,10 @@ export async function provisionEVault(w3id: string, eVaultId: string) {
value: "bolt://localhost:7687",
},
{ name: "NEO4J_USER", value: "neo4j" },
+ {
+ name: "REGISTRY_URL",
+ value: registryUrl,
+ },
{
name: "NEO4J_PASSWORD",
value: neo4jPassword,
@@ -183,7 +189,7 @@ export async function provisionEVault(w3id: string, eVaultId: string) {
kind: "Service",
metadata: { name: "evault-service" },
spec: {
- type: "LoadBalancer",
+ type: "NodePort",
selector: { app: "evault" },
ports: [
{
@@ -195,43 +201,23 @@ export async function provisionEVault(w3id: string, eVaultId: string) {
},
});
+ // Get the service and node info
const svc = await coreApi.readNamespacedService({
name: "evault-service",
namespace: namespaceName,
});
- const spec = svc.spec;
- const status = svc.status;
-
- // Check LoadBalancer first (cloud clusters)
- const ingress = status?.loadBalancer?.ingress?.[0];
- if (ingress?.ip || ingress?.hostname) {
- const host = ingress.ip || ingress.hostname;
- const port = spec?.ports?.[0]?.port;
- return `http://${host}:${port}`;
- }
-
- // Fallback: NodePort + Node IP (local clusters or bare-metal)
- const nodePort = spec?.ports?.[0]?.nodePort;
- if (!nodePort) throw new Error("No LoadBalancer or NodePort found.");
+ const nodePort = svc.spec?.ports?.[0]?.nodePort;
+ if (!nodePort) throw new Error("No NodePort assigned");
- // Try getting an external IP from the cluster nodes
+ // Get the node's external IP
const nodes = await coreApi.listNode();
- const address = nodes?.items[0].status.addresses.find(
- (a) => a.type === "ExternalIP" || a.type === "InternalIP",
- )?.address;
+ const node = nodes.items[0];
+ if (!node) throw new Error("No nodes found in cluster");
- if (address) {
- const isMinikubeIp = address === "192.168.49.2";
- return `http://${isMinikubeIp ? address : process.env.IP_ADDR.split("http://")[1]}:${nodePort}`;
- }
+ let externalIP = node.status?.addresses?.find(
+ (addr) => addr.type === "ExternalIP"
+ )?.address;
- // Local fallback: use minikube IP if available
- try {
- const minikubeIP = execSync("minikube ip").toString().trim();
- return `http://${minikubeIP}:${nodePort}`;
- } catch (e) {
- throw new Error(
- "Unable to determine service IP (no LoadBalancer, Node IP, or Minikube IP)",
- );
- }
+ if (!externalIP) externalIP = process.env.IP_ADDR;
+ return `http://${externalIP}:${nodePort}`;
}
diff --git a/infrastructure/web3-adapter/package.json b/infrastructure/web3-adapter/package.json
index 23b7325c..dd7114b1 100644
--- a/infrastructure/web3-adapter/package.json
+++ b/infrastructure/web3-adapter/package.json
@@ -2,7 +2,6 @@
"name": "web3-adapter",
"version": "1.0.0",
"description": "Web3 adapter for platform-specific data mapping to universal schema",
- "type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
@@ -15,8 +14,13 @@
"check-types": "tsc --noEmit"
},
"dependencies": {
+ "@types/node": "^24.0.0",
+ "axios": "^1.6.7",
"evault-core": "workspace:*",
+ "graphql-request": "^6.1.0",
+ "sqlite3": "^5.1.7",
"test": "^3.3.0",
+ "uuid": "^11.1.0",
"vitest": "^3.1.2"
},
"devDependencies": {
diff --git a/infrastructure/web3-adapter/src/db/index.ts b/infrastructure/web3-adapter/src/db/index.ts
new file mode 100644
index 00000000..fed4289f
--- /dev/null
+++ b/infrastructure/web3-adapter/src/db/index.ts
@@ -0,0 +1 @@
+export * from "./mapping.db";
\ No newline at end of file
diff --git a/infrastructure/web3-adapter/src/db/mapping.db.ts b/infrastructure/web3-adapter/src/db/mapping.db.ts
new file mode 100644
index 00000000..e9bef959
--- /dev/null
+++ b/infrastructure/web3-adapter/src/db/mapping.db.ts
@@ -0,0 +1,182 @@
+import sqlite3 from "sqlite3";
+import { join } from "path";
+import { promisify } from "util";
+
+export class MappingDatabase {
+ private db: sqlite3.Database;
+ private runAsync: (sql: string, params?: any) => Promise;
+ private getAsync: (sql: string, params?: any) => Promise;
+ private allAsync: (sql: string, params?: any) => Promise;
+
+ constructor(dbPath: string) {
+ // Ensure the directory exists
+ const fullPath = join(dbPath, "mappings.db");
+ this.db = new sqlite3.Database(fullPath);
+
+ // Promisify database methods
+ this.runAsync = promisify(this.db.run.bind(this.db));
+ this.getAsync = promisify(this.db.get.bind(this.db));
+ this.allAsync = promisify(this.db.all.bind(this.db));
+
+ // Initialize the database with the required tables
+ this.initialize();
+ }
+
+ private async initialize() {
+ await this.runAsync(`
+ CREATE TABLE IF NOT EXISTS id_mappings (
+ local_id TEXT NOT NULL,
+ global_id TEXT NOT NULL,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY (global_id)
+ )
+ `);
+
+ await this.runAsync(`
+ CREATE INDEX IF NOT EXISTS idx_local_id ON id_mappings(local_id)
+ `);
+ }
+
+ /**
+ * Store a mapping between local and global IDs
+ */
+ public async storeMapping(params: {
+ localId: string;
+ globalId: string;
+ }): Promise {
+ // Validate inputs
+ if (!params.localId || !params.globalId) {
+ throw new Error(
+ "Invalid mapping parameters: all fields are required"
+ );
+ }
+
+ console.log("storing mapping g:l", params.globalId, params.localId);
+
+ // Check if mapping already exists
+ const existingMapping = await this.getGlobalId(params.localId);
+
+ if (existingMapping) {
+ return;
+ }
+
+ try {
+ await this.runAsync(
+ `INSERT INTO id_mappings (local_id, global_id)
+ VALUES (?, ?)`,
+ [params.localId, params.globalId]
+ );
+
+ const storedMapping = await this.getGlobalId(params.localId);
+
+ if (storedMapping !== params.globalId) {
+ console.log(
+ "storedMappingError",
+ storedMapping,
+ params.globalId
+ );
+ console.error("Failed to store mapping");
+ return;
+ }
+ } catch (error) {
+ throw error;
+ }
+ }
+
+ /**
+ * Get the global ID for a local ID
+ */
+ public async getGlobalId(localId: string): Promise {
+ if (!localId) {
+ return null;
+ }
+
+ try {
+ const result = await this.getAsync(
+ `SELECT global_id
+ FROM id_mappings
+ WHERE local_id = ?`,
+ [localId]
+ );
+ return result?.global_id ?? null;
+ } catch (error) {
+ console.error("Error getting global ID:", error);
+ return null;
+ }
+ }
+
+ /**
+ * Get the local ID for a global ID
+ */
+ public async getLocalId(globalId: string): Promise {
+ if (!globalId) {
+ return null;
+ }
+
+ try {
+ const result = await this.getAsync(
+ `SELECT local_id
+ FROM id_mappings
+ WHERE global_id = ?`,
+ [globalId]
+ );
+ return result?.local_id ?? null;
+ } catch (error) {
+ return null;
+ }
+ }
+
+ /**
+ * Delete a mapping
+ */
+ public async deleteMapping(localId: string): Promise {
+ if (!localId) {
+ return;
+ }
+
+ try {
+ await this.runAsync(
+ `DELETE FROM id_mappings
+ WHERE local_id = ?`,
+ [localId]
+ );
+ } catch (error) {
+ throw error;
+ }
+ }
+
+ /**
+ * Get all mappings
+ */
+ public async getAllMappings(): Promise<
+ Array<{
+ localId: string;
+ globalId: string;
+ }>
+ > {
+ try {
+ const results = await this.allAsync(
+ `SELECT local_id, global_id
+ FROM id_mappings`
+ );
+
+ return results.map(({ local_id, global_id }) => ({
+ localId: local_id,
+ globalId: global_id,
+ }));
+ } catch (error) {
+ return [];
+ }
+ }
+
+ /**
+ * Close the database connection
+ */
+ public close(): void {
+ try {
+ this.db.close();
+ } catch (error) {
+ console.error("Error closing database connection:", error);
+ }
+ }
+}
diff --git a/infrastructure/web3-adapter/src/evault/evault.ts b/infrastructure/web3-adapter/src/evault/evault.ts
new file mode 100644
index 00000000..3f73d8ce
--- /dev/null
+++ b/infrastructure/web3-adapter/src/evault/evault.ts
@@ -0,0 +1,387 @@
+import { GraphQLClient } from "graphql-request";
+import axios, { AxiosInstance } from "axios";
+import { v4 } from "uuid";
+
+export interface MetaEnvelope {
+ id?: string | null;
+ schemaId: string;
+ data: Record;
+ w3id: string;
+}
+
+// Configuration constants
+const CONFIG = {
+ REQUEST_TIMEOUT: 30000, // 30 seconds
+ CONNECTION_TIMEOUT: 10000, // 10 seconds
+ TOKEN_REFRESH_THRESHOLD: 5 * 60 * 1000, // 5 minutes before expiry
+ MAX_RETRIES: 3,
+ RETRY_DELAY: 1000, // 1 second base delay
+ CONNECTION_POOL_SIZE: 10,
+} as const;
+
+const STORE_META_ENVELOPE = `
+ mutation StoreMetaEnvelope($input: MetaEnvelopeInput!) {
+ storeMetaEnvelope(input: $input) {
+ metaEnvelope {
+ id
+ ontology
+ parsed
+ }
+ }
+ }
+`;
+
+const FETCH_META_ENVELOPE = `
+ query FetchMetaEnvelope($id: ID!) {
+ metaEnvelope(id: $id) {
+ id
+ ontology
+ parsed
+ }
+ }
+`;
+
+const UPDATE_META_ENVELOPE = `
+ mutation UpdateMetaEnvelopeById($id: String!, $input: MetaEnvelopeInput!) {
+ updateMetaEnvelopeById(id: $id, input: $input) {
+ metaEnvelope {
+ id
+ ontology
+ parsed
+ }
+ envelopes {
+ id
+ ontology
+ value
+ valueType
+ }
+ }
+ }
+`;
+
+interface MetaEnvelopeResponse {
+ metaEnvelope: MetaEnvelope;
+}
+
+interface StoreMetaEnvelopeResponse {
+ storeMetaEnvelope: {
+ metaEnvelope: {
+ id: string;
+ ontology: string;
+ envelopes: Array<{
+ id: string;
+ ontology: string;
+ value: any;
+ valueType: string;
+ }>;
+ parsed: any;
+ };
+ envelopes: Array<{
+ id: string;
+ ontology: string;
+ value: any;
+ valueType: string;
+ }>;
+ };
+ updateMetaEnvelopeById: {
+ metaEnvelope: {
+ id: string;
+ ontology: string;
+ envelopes: Array<{
+ id: string;
+ ontology: string;
+ value: any;
+ valueType: string;
+ }>;
+ parsed: any;
+ };
+ envelopes: Array<{
+ id: string;
+ ontology: string;
+ value: any;
+ valueType: string;
+ }>;
+ };
+}
+
+interface PlatformTokenResponse {
+ token: string;
+ expiresAt?: number; // Unix timestamp when token expires
+}
+
+interface TokenInfo {
+ token: string;
+ expiresAt: number;
+ obtainedAt: number;
+}
+
+export class EVaultClient {
+ private client: GraphQLClient | null = null;
+ private endpoint: string | null = null;
+ private tokenInfo: TokenInfo | null = null;
+ private httpClient: AxiosInstance;
+ private isDisposed = false;
+
+ constructor(private registryUrl: string, private platform: string) {
+ // Configure axios with connection pooling and timeouts
+ this.httpClient = axios.create({
+ timeout: CONFIG.REQUEST_TIMEOUT,
+ maxRedirects: 3,
+ // Connection pooling configuration
+ httpAgent: new (require('http').Agent)({
+ keepAlive: true,
+ maxSockets: CONFIG.CONNECTION_POOL_SIZE,
+ timeout: CONFIG.CONNECTION_TIMEOUT,
+ }),
+ httpsAgent: new (require('https').Agent)({
+ keepAlive: true,
+ maxSockets: CONFIG.CONNECTION_POOL_SIZE,
+ timeout: CONFIG.CONNECTION_TIMEOUT,
+ }),
+ });
+ }
+
+ /**
+ * Cleanup method to properly dispose of resources
+ */
+ public dispose(): void {
+ if (this.isDisposed) return;
+
+ this.isDisposed = true;
+ this.client = null;
+ this.endpoint = null;
+ this.tokenInfo = null;
+
+ // Close HTTP agents to free connections
+ if (this.httpClient.defaults.httpAgent) {
+ this.httpClient.defaults.httpAgent.destroy();
+ }
+ if (this.httpClient.defaults.httpsAgent) {
+ this.httpClient.defaults.httpsAgent.destroy();
+ }
+ }
+
+ /**
+ * Retry wrapper with exponential backoff
+ */
+ private async withRetry(
+ operation: () => Promise,
+ maxRetries: number = CONFIG.MAX_RETRIES
+ ): Promise {
+ let lastError: Error;
+
+ for (let attempt = 0; attempt <= maxRetries; attempt++) {
+ try {
+ return await operation();
+ } catch (error) {
+ lastError = error as Error;
+
+ // Don't retry on the last attempt
+ if (attempt === maxRetries) break;
+
+ // Don't retry on certain errors
+ if (error instanceof Error) {
+ const isRetryable = !(
+ error.message.includes('401') ||
+ error.message.includes('403') ||
+ error.message.includes('404')
+ );
+
+ if (!isRetryable) break;
+ }
+
+ // Exponential backoff
+ const delay = CONFIG.RETRY_DELAY * Math.pow(2, attempt);
+ await new Promise(resolve => setTimeout(resolve, delay));
+ }
+ }
+
+ throw lastError!;
+ }
+
+ /**
+ * Requests a platform token from the registry
+ * @returns Promise - The platform token
+ */
+ private async requestPlatformToken(): Promise {
+ try {
+ const response = await this.httpClient.post(
+ new URL("/platforms/certification", this.registryUrl).toString(),
+ { platform: this.platform },
+ {
+ headers: {
+ "Content-Type": "application/json",
+ },
+ timeout: CONFIG.REQUEST_TIMEOUT,
+ }
+ );
+
+ const now = Date.now();
+ const expiresAt = response.data.expiresAt || (now + 3600000); // Default 1 hour
+
+ return {
+ token: response.data.token,
+ expiresAt,
+ obtainedAt: now,
+ };
+ } catch (error) {
+ console.error("Error requesting platform token:", error);
+ throw new Error("Failed to request platform token");
+ }
+ }
+
+ /**
+ * Checks if token needs refresh
+ */
+ private isTokenExpired(): boolean {
+ if (!this.tokenInfo) return true;
+
+ const now = Date.now();
+ const timeUntilExpiry = this.tokenInfo.expiresAt - now;
+
+ return timeUntilExpiry <= CONFIG.TOKEN_REFRESH_THRESHOLD;
+ }
+
+ /**
+ * Ensures we have a valid platform token, requesting one if needed
+ * @returns Promise - The platform token
+ */
+ private async ensurePlatformToken(): Promise {
+ if (!this.tokenInfo || this.isTokenExpired()) {
+ this.tokenInfo = await this.requestPlatformToken();
+ }
+ return this.tokenInfo.token;
+ }
+
+ private async resolveEndpoint(w3id: string): Promise {
+ try {
+ const response = await this.httpClient.get(
+ new URL(`/resolve?w3id=${w3id}`, this.registryUrl).toString(),
+ {
+ timeout: CONFIG.REQUEST_TIMEOUT,
+ }
+ );
+ return new URL("/graphql", response.data.uri).toString();
+ } catch (error) {
+ console.error("Error resolving eVault endpoint:", error);
+ throw new Error("Failed to resolve eVault endpoint");
+ }
+ }
+
+ private async ensureClient(w3id: string): Promise {
+ if (this.isDisposed) {
+ throw new Error("EVaultClient has been disposed");
+ }
+
+ if (!this.endpoint || !this.client) {
+ this.endpoint = await this.resolveEndpoint(w3id).catch(() => null);
+ if (!this.endpoint) throw new Error("Failed to resolve endpoint");
+
+ // Get platform token and create client with authorization header
+ const token = await this.ensurePlatformToken();
+ this.client = new GraphQLClient(this.endpoint, {
+ headers: {
+ authorization: `Bearer ${token}`,
+ },
+ });
+ }
+ return this.client;
+ }
+
+ async storeMetaEnvelope(envelope: MetaEnvelope): Promise {
+ return this.withRetry(async () => {
+ const client = await this.ensureClient(envelope.w3id).catch(() => {
+ return null;
+ });
+ if (!client) return v4();
+
+ console.log("sending payload", envelope);
+
+ const response = await client
+ .request(STORE_META_ENVELOPE, {
+ input: {
+ ontology: envelope.schemaId,
+ payload: envelope.data,
+ acl: ["*"],
+ },
+ })
+ .catch(() => null);
+
+ if (!response) return v4();
+ return response.storeMetaEnvelope.metaEnvelope.id;
+ });
+ }
+
+ async storeReference(referenceId: string, w3id: string): Promise {
+ return this.withRetry(async () => {
+ const client = await this.ensureClient(w3id);
+
+ const response = await client
+ .request(STORE_META_ENVELOPE, {
+ input: {
+ ontology: "reference",
+ payload: {
+ _by_reference: referenceId,
+ },
+ acl: ["*"],
+ },
+ })
+ .catch(() => null);
+
+ if (!response) {
+ console.error("Failed to store reference");
+ throw new Error("Failed to store reference");
+ }
+ });
+ }
+
+ async fetchMetaEnvelope(id: string, w3id: string): Promise {
+ return this.withRetry(async () => {
+ const client = await this.ensureClient(w3id);
+
+ try {
+ const response = await client.request(
+ FETCH_META_ENVELOPE,
+ {
+ id,
+ w3id,
+ }
+ );
+ return response.metaEnvelope;
+ } catch (error) {
+ console.error("Error fetching meta envelope:", error);
+ throw error;
+ }
+ });
+ }
+
+ async updateMetaEnvelopeById(
+ id: string,
+ envelope: MetaEnvelope
+ ): Promise {
+ return this.withRetry(async () => {
+ console.log("sending to eVault", envelope.w3id);
+ const client = await this.ensureClient(envelope.w3id).catch(() => null);
+ if (!client) throw new Error("Failed to establish client connection");
+
+ try {
+ const variables = {
+ id,
+ input: {
+ ontology: envelope.schemaId,
+ payload: envelope.data,
+ acl: ["*"],
+ },
+ };
+
+ const response = await client.request(
+ UPDATE_META_ENVELOPE,
+ variables
+ );
+ } catch (error) {
+ console.error("Error updating meta envelope:", error);
+ throw error;
+ }
+ });
+ }
+}
diff --git a/infrastructure/web3-adapter/src/index.ts b/infrastructure/web3-adapter/src/index.ts
new file mode 100644
index 00000000..9e3039ca
--- /dev/null
+++ b/infrastructure/web3-adapter/src/index.ts
@@ -0,0 +1,156 @@
+import * as fs from "fs/promises";
+import path from "path";
+import { IMapping } from "./mapper/mapper.types";
+import { fromGlobal, toGlobal } from "./mapper/mapper";
+import { MappingDatabase } from "./db";
+import { EVaultClient } from "./evault/evault";
+import { v4 as uuidv4 } from "uuid";
+
+export class Web3Adapter {
+ mapping: Record = {};
+ mappingDb: MappingDatabase;
+ evaultClient: EVaultClient;
+ lockedIds: string[] = [];
+ platform: string;
+
+ constructor(
+ private readonly config: {
+ schemasPath: string;
+ dbPath: string;
+ registryUrl: string;
+ platform: string;
+ }
+ ) {
+ this.readPaths();
+ this.mappingDb = new MappingDatabase(config.dbPath);
+ this.evaultClient = new EVaultClient(
+ config.registryUrl,
+ config.platform
+ );
+ this.platform = config.platform;
+ }
+
+ async readPaths() {
+ const allRawFiles = await fs.readdir(this.config.schemasPath);
+ const mappingFiles = allRawFiles.filter((p: string) =>
+ p.endsWith(".json")
+ );
+
+ for (const mappingFile of mappingFiles) {
+ const mappingFileContent = await fs.readFile(
+ path.join(this.config.schemasPath, mappingFile)
+ );
+ const mappingParsed = JSON.parse(
+ mappingFileContent.toString()
+ ) as IMapping;
+ this.mapping[mappingParsed.tableName] = mappingParsed;
+ }
+ }
+
+ addToLockedIds(id: string) {
+ this.lockedIds.push(id);
+ console.log("Added", this.lockedIds);
+ setTimeout(() => {
+ this.lockedIds = this.lockedIds.filter((f) => f !== id);
+ }, 15_000);
+ }
+
+ async handleChange(props: {
+ data: Record;
+ tableName: string;
+ participants?: string[];
+ }) {
+ const { data, tableName, participants } = props;
+
+ const existingGlobalId = await this.mappingDb.getGlobalId(
+ data.id as string
+ );
+
+ console.log(this.mapping, tableName, this.mapping[tableName]);
+
+ // If we already have a mapping, use that global ID
+ if (existingGlobalId) {
+ if (this.lockedIds.includes(existingGlobalId)) return;
+ const global = await toGlobal({
+ data,
+ mapping: this.mapping[tableName],
+ mappingStore: this.mappingDb,
+ });
+
+ this.evaultClient
+ .updateMetaEnvelopeById(existingGlobalId, {
+ id: existingGlobalId,
+ w3id: global.ownerEvault as string,
+ data: global.data,
+ schemaId: this.mapping[tableName].schemaId,
+ })
+ .catch(() => console.error("failed to sync update"));
+
+ return {
+ id: existingGlobalId,
+ w3id: global.ownerEvault as string,
+ data: global.data,
+ schemaId: this.mapping[tableName].schemaId,
+ };
+ }
+
+ // For new entities, create a new global ID
+ const global = await toGlobal({
+ data,
+ mapping: this.mapping[tableName],
+ mappingStore: this.mappingDb,
+ });
+
+ let globalId: string;
+ if (global.ownerEvault) {
+ globalId = await this.evaultClient.storeMetaEnvelope({
+ id: null,
+ w3id: global.ownerEvault as string,
+ data: global.data,
+ schemaId: this.mapping[tableName].schemaId,
+ });
+ console.log("created new meta-env", globalId);
+ } else {
+ return;
+ }
+
+ // Store the mapping
+ await this.mappingDb.storeMapping({
+ localId: data.id as string,
+ globalId,
+ });
+
+ // Handle references for other participants
+ const otherEvaults = (participants ?? []).filter(
+ (i: string) => i !== global.ownerEvault
+ );
+ for (const evault of otherEvaults) {
+ await this.evaultClient.storeReference(
+ `${global.ownerEvault}/${globalId}`,
+ evault
+ );
+ }
+
+ return {
+ id: globalId,
+ w3id: global.ownerEvault as string,
+ data: global.data,
+ schemaId: this.mapping[tableName].schemaId,
+ };
+ }
+
+ async fromGlobal(props: {
+ data: Record;
+ mapping: IMapping;
+ }) {
+ const { data, mapping } = props;
+
+ const local = await fromGlobal({
+ data,
+ mapping,
+ mappingStore: this.mappingDb,
+ });
+
+ return local;
+ }
+}
diff --git a/infrastructure/web3-adapter/src/mapper/mapper.ts b/infrastructure/web3-adapter/src/mapper/mapper.ts
new file mode 100644
index 00000000..a7ad6872
--- /dev/null
+++ b/infrastructure/web3-adapter/src/mapper/mapper.ts
@@ -0,0 +1,273 @@
+import { IMappingConversionOptions, IMapperResponse } from "./mapper.types";
+
+export function getValueByPath(obj: Record, path: string): any {
+ // Handle array mapping case (e.g., "images[].src")
+ if (path.includes("[]")) {
+ const [arrayPath, fieldPath] = path.split("[]");
+ const array = getValueByPath(obj, arrayPath);
+
+ if (!Array.isArray(array)) {
+ return [];
+ }
+
+ // If there's a field path after [], map through the array
+ if (fieldPath) {
+ return array.map((item) =>
+ getValueByPath(item, fieldPath.slice(1))
+ ); // Remove the leading dot
+ }
+
+ return array;
+ }
+
+ // Handle regular path case
+ const parts = path.split(".");
+ return parts.reduce((acc: any, part: string) => {
+ if (acc === null || acc === undefined) return undefined;
+ return acc[part];
+ }, obj);
+}
+
+async function extractOwnerEvault(
+ data: Record,
+ ownerEnamePath: string
+): Promise {
+ if (!ownerEnamePath || ownerEnamePath === "null") {
+ return null;
+ }
+ if (!ownerEnamePath.includes("(")) {
+ return (data[ownerEnamePath] as string) || null;
+ }
+
+ const [_, fieldPathRaw] = ownerEnamePath.split("(");
+ const fieldPath = fieldPathRaw.replace(")", "");
+ let value = getValueByPath(data, fieldPath);
+ if (Array.isArray(value)) return value[0];
+ console.log("OWNER PATH", value);
+ if (value.includes("(") && value.includes(")")) {
+ value = value.split("(")[1].split(")")[0];
+ }
+ return (value as string) || null;
+}
+
+export async function fromGlobal({
+ data,
+ mapping,
+ mappingStore,
+}: IMappingConversionOptions): Promise> {
+ const result: Record = {};
+
+ for (let [localKey, globalPathRaw] of Object.entries(
+ mapping.localToUniversalMap
+ )) {
+ let value: any;
+ let targetKey: string = localKey;
+ let tableRef: string | null = null;
+
+ const internalFnMatch = globalPathRaw.match(/^__(\w+)\((.+)\)$/);
+ if (internalFnMatch) {
+ const [, outerFn, innerExpr] = internalFnMatch;
+
+ if (outerFn === "date") {
+ const calcMatch = innerExpr.match(/^calc\((.+)\)$/);
+ if (calcMatch) {
+ const calcResult = evaluateCalcExpression(
+ calcMatch[1],
+ data
+ );
+ value =
+ calcResult !== undefined
+ ? new Date(calcResult).toISOString()
+ : undefined;
+ } else {
+ const rawVal = getValueByPath(data, innerExpr);
+ if (typeof rawVal === "number") {
+ value = new Date(rawVal).toISOString();
+ } else if (rawVal?._seconds) {
+ value = new Date(rawVal._seconds * 1000).toISOString();
+ } else if (rawVal instanceof Date) {
+ value = rawVal.toISOString();
+ } else {
+ value = undefined;
+ }
+ }
+ } else if (outerFn === "calc") {
+ value = evaluateCalcExpression(innerExpr, data);
+ }
+
+ result[targetKey] = value;
+ continue;
+ }
+ let pathRef = globalPathRaw;
+ if (globalPathRaw.includes("(") && globalPathRaw.includes(")")) {
+ tableRef = globalPathRaw.split("(")[0];
+ }
+ if (pathRef.includes(",")) {
+ pathRef = pathRef.split(",")[1];
+ }
+ value = getValueByPath(data, pathRef);
+
+ if (tableRef) {
+ if (Array.isArray(value)) {
+ value = await Promise.all(
+ value.map(async (v) => {
+ const localId = await mappingStore.getLocalId(v);
+
+ return localId ? `${tableRef}(${localId})` : null;
+ })
+ );
+ } else {
+ value = await mappingStore.getLocalId(value);
+ value = value ? `${tableRef}(${value})` : null;
+ }
+ }
+
+ result[localKey] = value;
+ }
+
+ return {
+ data: result,
+ };
+}
+
+function evaluateCalcExpression(
+ expr: string,
+ context: Record
+): number | undefined {
+ const tokens = expr
+ .split(/[^\w.]+/)
+ .map((t) => t.trim())
+ .filter(Boolean);
+
+ let resolvedExpr = expr;
+ for (const token of tokens) {
+ const value = getValueByPath(context, token);
+ if (typeof value !== "undefined") {
+ resolvedExpr = resolvedExpr.replace(
+ new RegExp(`\\b${token.replace(".", "\\.")}\\b`, "g"),
+ value
+ );
+ }
+ }
+
+ try {
+ return Function('"use strict"; return (' + resolvedExpr + ")")();
+ } catch {
+ return undefined;
+ }
+}
+
+export async function toGlobal({
+ data,
+ mapping,
+ mappingStore,
+}: IMappingConversionOptions): Promise {
+ const result: Record = {};
+
+ for (let [localKey, globalPathRaw] of Object.entries(
+ mapping.localToUniversalMap
+ )) {
+ let value: any;
+ let targetKey: string = globalPathRaw;
+
+ if (globalPathRaw.includes(",")) {
+ const [_, alias] = globalPathRaw.split(",");
+ targetKey = alias;
+ }
+
+ if (localKey.includes("[]")) {
+ const [arrayPath, innerPathRaw] = localKey.split("[]");
+ const cleanInnerPath = innerPathRaw.startsWith(".")
+ ? innerPathRaw.slice(1)
+ : innerPathRaw;
+ const array = getValueByPath(data, arrayPath);
+ value = Array.isArray(array)
+ ? array.map((item) => getValueByPath(item, cleanInnerPath))
+ : undefined;
+ result[targetKey] = value;
+ continue;
+ }
+
+ const internalFnMatch = globalPathRaw.match(/^__(\w+)\((.+)\)$/);
+ if (internalFnMatch) {
+ const [, outerFn, innerExpr] = internalFnMatch;
+
+ if (outerFn === "date") {
+ const calcMatch = innerExpr.match(/^calc\((.+)\)$/);
+ if (calcMatch) {
+ const calcResult = evaluateCalcExpression(
+ calcMatch[1],
+ data
+ );
+ value =
+ calcResult !== undefined
+ ? new Date(calcResult).toISOString()
+ : undefined;
+ } else {
+ const rawVal = getValueByPath(data, innerExpr);
+ if (typeof rawVal === "number") {
+ value = new Date(rawVal).toISOString();
+ } else if (rawVal?._seconds) {
+ value = new Date(rawVal._seconds * 1000).toISOString();
+ } else if (rawVal instanceof Date) {
+ value = rawVal.toISOString();
+ } else {
+ value = undefined;
+ }
+ }
+ } else if (outerFn === "calc") {
+ value = evaluateCalcExpression(innerExpr, data);
+ }
+
+ result[targetKey] = value;
+ continue;
+ }
+
+ const relationMatch = globalPathRaw.match(/^(\w+)\((.+?)\)(\[\])?$/);
+ if (relationMatch) {
+ const [, tableRef, pathInData, isArray] = relationMatch;
+ const refValue = getValueByPath(data, pathInData);
+ if (isArray) {
+ value = Array.isArray(refValue)
+ ? refValue.map((v) => `@${v}`)
+ : [];
+ } else {
+ value = refValue ? `@${refValue}` : undefined;
+ }
+ result[targetKey] = value;
+ continue;
+ }
+
+ let pathRef: string = globalPathRaw.includes(",")
+ ? globalPathRaw
+ : localKey;
+ let tableRef: string | null = null;
+ if (globalPathRaw.includes("(") && globalPathRaw.includes(")")) {
+ pathRef = globalPathRaw.split("(")[1].split(")")[0];
+ tableRef = globalPathRaw.split("(")[0];
+ }
+ if (globalPathRaw.includes(",")) {
+ pathRef = pathRef.split(",")[0];
+ }
+ value = getValueByPath(data, pathRef);
+ if (tableRef) {
+ if (Array.isArray(value)) {
+ value = await Promise.all(
+ value.map(
+ async (v) =>
+ (await mappingStore.getGlobalId(v)) ?? undefined
+ )
+ );
+ } else {
+ value = (await mappingStore.getGlobalId(value)) ?? undefined;
+ }
+ }
+ result[targetKey] = value;
+ }
+ const ownerEvault = await extractOwnerEvault(data, mapping.ownerEnamePath);
+
+ return {
+ ownerEvault,
+ data: result,
+ };
+}
diff --git a/infrastructure/web3-adapter/src/mapper/mapper.types.ts b/infrastructure/web3-adapter/src/mapper/mapper.types.ts
new file mode 100644
index 00000000..4b9bff6c
--- /dev/null
+++ b/infrastructure/web3-adapter/src/mapper/mapper.types.ts
@@ -0,0 +1,47 @@
+import { MappingDatabase } from "../db";
+
+export interface IMapping {
+ /**
+ * Name of the local table, this would be consumed by other schemas to
+ * identify relations
+ */
+ tableName: string;
+
+ /**
+ * Schema Identifier for the global schema this table maps to
+ */
+ schemaId: string;
+
+ /**
+ * Path used to determine which eVault owns this entry.
+ *
+ * This can be a direct field on the table or a nested path via a foreign table.
+ *
+ * - For direct fields, use the field name (e.g. `"ename"`).
+ * - For nested ownership, use a function-like syntax to reference another table
+ * and field (e.g. `"user(createdBy.ename)"` means follow the `createdBy` field,
+ * then resolve `ename` from the `user` table).
+ *
+ * Use `tableName(fieldPath)` to reference a field from another table.
+ *
+ * @example "ename" โ direct reference to a field on the same table
+ * @example "user(createdBy.ename)" โ nested reference via the `user` table
+ */
+ ownerEnamePath: string;
+
+ /**
+ * String to String mapping between what path maps to what global ontology
+ */
+ localToUniversalMap: Record;
+}
+
+export interface IMappingConversionOptions {
+ data: Record;
+ mapping: IMapping;
+ mappingStore: MappingDatabase;
+}
+
+export interface IMapperResponse {
+ ownerEvault: string | null;
+ data: Record;
+}
diff --git a/infrastructure/web3-adapter/tsconfig.json b/infrastructure/web3-adapter/tsconfig.json
index 12ea365d..062be0be 100644
--- a/infrastructure/web3-adapter/tsconfig.json
+++ b/infrastructure/web3-adapter/tsconfig.json
@@ -1,17 +1,25 @@
{
- "compilerOptions": {
- "target": "ES2020",
- "module": "ES2020",
- "moduleResolution": "node",
- "lib": ["ES2020"],
- "declaration": true,
- "outDir": "./dist",
- "rootDir": "./src",
- "strict": true,
- "esModuleInterop": true,
- "skipLibCheck": true,
- "forceConsistentCasingInFileNames": true
- },
- "include": ["src/**/*"],
- "exclude": ["node_modules", "dist", "**/*.test.ts"]
-}
\ No newline at end of file
+ "compilerOptions": {
+ "target": "ES2021",
+ "module": "NodeNext",
+ "moduleResolution": "node",
+ "lib": [
+ "ES2020"
+ ],
+ "declaration": true,
+ "outDir": "./dist",
+ "rootDir": "./src",
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "forceConsistentCasingInFileNames": true
+ },
+ "include": [
+ "src/**/*"
+ ],
+ "exclude": [
+ "node_modules",
+ "dist",
+ "**/*.test.ts"
+ ]
+}
diff --git a/platforms/blabsy-w3ds-auth-api/package.json b/platforms/blabsy-w3ds-auth-api/package.json
index e8abf137..06d84693 100644
--- a/platforms/blabsy-w3ds-auth-api/package.json
+++ b/platforms/blabsy-w3ds-auth-api/package.json
@@ -1,8 +1,8 @@
{
"name": "blabsy-w3ds-auth-api",
"version": "1.0.0",
- "description": "Piqtique Social Media Platform API",
- "main": "src/index.ts",
+ "description": "Web3 Data Sync Authentication API for Blabsy",
+ "main": "dist/index.js",
"scripts": {
"start": "ts-node src/index.ts",
"dev": "nodemon --exec ts-node src/index.ts",
@@ -10,7 +10,8 @@
"typeorm": "typeorm-ts-node-commonjs",
"migration:generate": "npm run typeorm migration:generate -- -d src/database/data-source.ts",
"migration:run": "npm run typeorm migration:run -- -d src/database/data-source.ts",
- "migration:revert": "npm run typeorm migration:revert -- -d src/database/data-source.ts"
+ "migration:revert": "npm run typeorm migration:revert -- -d src/database/data-source.ts",
+ "test": "jest"
},
"dependencies": {
"axios": "^1.6.7",
@@ -18,18 +19,20 @@
"dotenv": "^16.4.5",
"eventsource-polyfill": "^0.9.6",
"express": "^4.18.2",
- "firebase-admin": "^13.4.0",
+ "firebase-admin": "^12.0.0",
"jsonwebtoken": "^9.0.2",
"pg": "^8.11.3",
"reflect-metadata": "^0.2.1",
"typeorm": "^0.3.20",
- "uuid": "^9.0.1"
+ "uuid": "^9.0.1",
+ "graphql": "^16.8.1",
+ "graphql-request": "^6.1.0"
},
"devDependencies": {
"@types/cors": "^2.8.17",
"@types/express": "^4.17.21",
"@types/jsonwebtoken": "^9.0.5",
- "@types/node": "^20.11.24",
+ "@types/node": "^20.11.19",
"@types/pg": "^8.11.2",
"@types/uuid": "^9.0.8",
"@typescript-eslint/eslint-plugin": "^7.0.1",
@@ -37,6 +40,10 @@
"eslint": "^8.56.0",
"nodemon": "^3.0.3",
"ts-node": "^10.9.2",
- "typescript": "^5.3.3"
+ "typescript": "^5.3.3",
+ "@types/jest": "^29.5.12",
+ "jest": "^29.7.0",
+ "ts-jest": "^29.1.2",
+ "ts-node-dev": "^2.0.0"
}
}
diff --git a/platforms/blabsy-w3ds-auth-api/src/controllers/AuthController.ts b/platforms/blabsy-w3ds-auth-api/src/controllers/AuthController.ts
index f398c850..0d1365e8 100644
--- a/platforms/blabsy-w3ds-auth-api/src/controllers/AuthController.ts
+++ b/platforms/blabsy-w3ds-auth-api/src/controllers/AuthController.ts
@@ -1,7 +1,6 @@
import { Request, Response } from "express";
import { v4 as uuidv4 } from "uuid";
import { EventEmitter } from "events";
-import { applicationDefault, initializeApp } from "firebase-admin/app";
import { auth } from "firebase-admin";
export class AuthController {
private eventEmitter: EventEmitter;
@@ -57,9 +56,6 @@ export class AuthController {
if (!ename) {
return res.status(400).json({ error: "ename is required" });
}
- initializeApp({
- credential: applicationDefault(),
- });
const token = await auth().createCustomToken(ename);
console.log(token);
diff --git a/platforms/blabsy-w3ds-auth-api/src/controllers/WebhookController.ts b/platforms/blabsy-w3ds-auth-api/src/controllers/WebhookController.ts
new file mode 100644
index 00000000..0cd0e001
--- /dev/null
+++ b/platforms/blabsy-w3ds-auth-api/src/controllers/WebhookController.ts
@@ -0,0 +1,303 @@
+import { Request, Response } from "express";
+import { Web3Adapter } from "../../../../infrastructure/web3-adapter/src/index";
+import path from "path";
+import dotenv from "dotenv";
+import { getFirestore } from "firebase-admin/firestore";
+import { Timestamp } from "firebase-admin/firestore";
+
+// Define types locally since we can't import from @blabsy/types
+type User = {
+ id: string;
+ bio: string | null;
+ name: string;
+ theme: string | null;
+ accent: string | null;
+ website: string | null;
+ location: string | null;
+ username: string;
+ photoURL: string;
+ verified: boolean;
+ following: string[];
+ followers: string[];
+ createdAt: Timestamp;
+ updatedAt: Timestamp | null;
+ totalTweets: number;
+ totalPhotos: number;
+ pinnedTweet: string | null;
+ coverPhotoURL: string | null;
+};
+
+type Tweet = {
+ id: string;
+ text: string | null;
+ images: any | null;
+ parent: { id: string; username: string } | null;
+ userLikes: string[];
+ createdBy: string;
+ createdAt: Timestamp;
+ updatedAt: Timestamp | null;
+ userReplies: number;
+ userRetweets: string[];
+};
+
+type Chat = {
+ id: string;
+ type: "direct" | "group";
+ name?: string;
+ participants: string[];
+ createdAt: Timestamp;
+ updatedAt: Timestamp;
+ lastMessage?: {
+ text: string;
+ senderId: string;
+ timestamp: Timestamp;
+ };
+};
+
+type Message = {
+ id: string;
+ chatId: string;
+ senderId: string;
+ text: string;
+ createdAt: Timestamp;
+ updatedAt: Timestamp;
+ readBy: string[];
+};
+
+dotenv.config({ path: path.resolve(__dirname, "../../../../.env") });
+
+export const adapter = new Web3Adapter({
+ schemasPath: path.resolve(__dirname, "../web3adapter/mappings/"),
+ dbPath: path.resolve(process.env.BLABSY_MAPPING_DB_PATH as string),
+ registryUrl: process.env.PUBLIC_REGISTRY_URL as string,
+ platform: process.env.PUBLIC_BLABSY_BASE_URL as string,
+});
+
+export class WebhookController {
+ db: FirebaseFirestore.Firestore;
+
+ constructor() {
+ this.db = getFirestore();
+ // Bind the method to preserve 'this' context
+ this.handleWebhook = this.handleWebhook.bind(this);
+ }
+
+ async handleWebhook(req: Request, res: Response) {
+ try {
+ const { data, schemaId, id } = req.body;
+
+ console.log("received webhook????", req.body);
+
+ if (adapter.lockedIds.includes(id)) return;
+ console.log("processing -- not skipped");
+ adapter.addToLockedIds(id);
+
+ const mapping = Object.values(adapter.mapping).find(
+ (m) => m.schemaId === schemaId,
+ );
+ if (!mapping) throw new Error();
+ const tableName = mapping.tableName + "s";
+
+ const local = await adapter.fromGlobal({ data, mapping });
+
+ console.log(local);
+ //
+ // Get the local ID from the mapping database
+ const localId = await adapter.mappingDb.getLocalId(id);
+
+ if (localId) {
+ console.log("LOCAL, updating");
+ adapter.addToLockedIds(localId);
+ await this.updateRecord(tableName, localId, local.data);
+ } else {
+ console.log("NOT LOCAL, creating");
+ await this.createRecord(tableName, local.data, req.body.id);
+ }
+
+ res.status(200).json({ success: true });
+ } catch (error) {
+ console.error("Error handling webhook:", error);
+ res.status(500).json({ error: "Internal server error" });
+ }
+ }
+
+ private async createRecord(tableName: string, data: any, globalId: string) {
+ const chatId = data.chatId
+ ? data.chatId.split("(")[1].split(")")[0]
+ : null;
+
+ let collection;
+ if (tableName === "messages" && data.chatId) {
+ collection = this.db.collection(`chats/${chatId}/messages`);
+ } else {
+ collection = this.db.collection(tableName);
+ }
+
+ let docRef = collection.doc();
+
+ const mappedData = await this.mapDataToFirebase(tableName, data);
+ if (tableName === "users") {
+ docRef = collection.doc(data.ename);
+ } else {
+ // Use auto-generated ID for other tables
+ docRef = collection.doc();
+ }
+ await docRef.set(mappedData);
+
+ adapter.addToLockedIds(docRef.id);
+ adapter.addToLockedIds(globalId);
+ await adapter.mappingDb.storeMapping({
+ globalId: globalId,
+ localId: docRef.id,
+ });
+ }
+
+ private async updateRecord(tableName: string, localId: string, data: any) {
+ const collection = this.db.collection(tableName);
+ const docRef = collection.doc(localId);
+
+ adapter.addToLockedIds(docRef.id);
+
+ const docSnapshot = await docRef.get();
+
+ if (!docSnapshot.exists) {
+ console.warn(
+ `Document with ID '${localId}' does not exist in '${tableName}'. Skipping update.`,
+ );
+ return;
+ }
+
+ const mappedData = await this.mapDataToFirebase(tableName, data);
+ await docRef.update(mappedData);
+ }
+ private mapDataToFirebase(tableName: string, data: any): any {
+ const now = Timestamp.now();
+ console.log("MAPPING DATA TO ", tableName);
+
+ switch (tableName) {
+ case "users":
+ const result = this.mapUserData(data, now);
+ console.log("mappppped", result);
+ return result;
+ case "tweets":
+ return this.mapTweetData(data, now);
+ case "chats":
+ return this.mapChatData(data, now);
+ case "messages":
+ return this.mapMessageData(data, now);
+ default:
+ return data;
+ }
+ }
+
+ private mapUserData(data: any, now: Timestamp): Partial {
+ let userData: Record = {
+ bio: data.bio || null,
+ name: data.name,
+ theme: data.theme || null,
+ accent: data.accent || null,
+ website: null,
+ location: null,
+ username: data.username || data.ename.split("@")[1],
+ photoURL: data.photoURL ?? "/assets/twitter-avatar.jpg",
+ verified: data.verified || false,
+ following: data.following || [],
+ followers: data.followers || [],
+ createdAt: data.createdAt
+ ? Timestamp.fromDate(new Date(data.createdAt))
+ : now,
+ updatedAt: now,
+ totalTweets: data.totalTweets || 0,
+ totalPhotos: data.totalPhotos || 0,
+ pinnedTweet: data.pinnedTweet || null,
+ coverPhotoURL: data.coverPhotoURL || null,
+ };
+ if (data.ename) userData.id = data.ename;
+ return userData;
+ }
+
+ private async mapTweetData(
+ data: any,
+ now: Timestamp,
+ ): Promise> {
+ let createdBy = data.createdBy;
+ if (createdBy.includes("(") && createdBy.includes(")")) {
+ createdBy = createdBy.split("(")[1].split(")")[0];
+ }
+ const filteredResult = {};
+ for (const key of Object.keys(data)) {
+ if (data[key]) {
+ // @ts-ignore
+ filteredResult[key] = data[key];
+ }
+ }
+ const usersCollectionRef = this.db.collection("users");
+ const user = (await usersCollectionRef.doc(createdBy).get()).data();
+
+ const tweetData = {
+ ...filteredResult,
+ userLikes: data.userLikes
+ .filter((l: string) => !!l)
+ .map((u: string) => {
+ if (u.includes("(") && u.includes(")")) {
+ return u.split("(")[1].split(")")[0];
+ } else {
+ return u;
+ }
+ }),
+ createdBy,
+ images: data.images
+ ? data.images.map((i: string) => ({
+ src: i,
+ }))
+ : null,
+ parent:
+ data.parent && user
+ ? {
+ id: data.parent.split("(")[1].split(")")[0],
+ username: user.username,
+ }
+ : null,
+ createdAt: Timestamp.fromDate(new Date(Date.now())),
+ userRetweets: [],
+ userReplies: 0,
+ };
+ return tweetData;
+ }
+
+ private mapChatData(data: any, now: Timestamp): Partial {
+ return {
+ type: data.type || "direct",
+ name: data.name,
+ participants:
+ data.participants.map(
+ (p: string) => p.split("(")[1].split(")")[0],
+ ) || [],
+ createdAt: data.createdAt
+ ? Timestamp.fromDate(new Date(data.createdAt))
+ : now,
+ updatedAt: now,
+ lastMessage: data.lastMessage
+ ? {
+ ...data.lastMessage,
+ timestamp: Timestamp.fromDate(
+ new Date(data.lastMessage.timestamp),
+ ),
+ }
+ : null,
+ };
+ }
+
+ private mapMessageData(data: any, now: Timestamp): Partial {
+ return {
+ chatId: data.chatId.split("(")[1].split(")")[0],
+ senderId: data.senderId.split("(")[1].split(")")[0],
+ text: data.text,
+ createdAt: data.createdAt
+ ? Timestamp.fromDate(new Date(data.createdAt))
+ : now,
+ updatedAt: now,
+ readBy: data.readBy || [],
+ };
+ }
+}
diff --git a/platforms/blabsy-w3ds-auth-api/src/index.ts b/platforms/blabsy-w3ds-auth-api/src/index.ts
index 9eb37819..263c1f48 100644
--- a/platforms/blabsy-w3ds-auth-api/src/index.ts
+++ b/platforms/blabsy-w3ds-auth-api/src/index.ts
@@ -4,6 +4,9 @@ import cors from "cors";
import { config } from "dotenv";
import path from "path";
import { AuthController } from "./controllers/AuthController";
+import { initializeApp, cert, applicationDefault } from "firebase-admin/app";
+import { Web3Adapter } from "./web3adapter";
+import { WebhookController } from "./controllers/WebhookController";
config({ path: path.resolve(__dirname, "../../../.env") });
@@ -23,9 +26,33 @@ app.use(express.urlencoded({ limit: "50mb", extended: true }));
const authController = new AuthController();
+initializeApp({
+ credential: applicationDefault(),
+});
+
+// Initialize Web3Adapter
+const web3Adapter = new Web3Adapter();
+
+web3Adapter.initialize().catch((error) => {
+ console.error("Failed to initialize Web3Adapter:", error);
+ process.exit(1);
+});
+
+// Register webhook endpoint
+
+const webhookController = new WebhookController();
+
app.get("/api/auth/offer", authController.getOffer);
app.post("/api/auth", authController.login);
app.get("/api/auth/sessions/:id", authController.sseStream);
+app.post("/api/webhook", webhookController.handleWebhook);
+
+// Graceful shutdown
+process.on("SIGTERM", async () => {
+ console.log("SIGTERM received. Shutting down...");
+ await web3Adapter.shutdown();
+ process.exit(0);
+});
app.listen(port, () => {
console.log(`Server running on port ${port}`);
diff --git a/platforms/blabsy-w3ds-auth-api/src/web3adapter/index.ts b/platforms/blabsy-w3ds-auth-api/src/web3adapter/index.ts
new file mode 100644
index 00000000..63e962c9
--- /dev/null
+++ b/platforms/blabsy-w3ds-auth-api/src/web3adapter/index.ts
@@ -0,0 +1,107 @@
+import { getFirestore } from "firebase-admin/firestore";
+import { FirestoreWatcher } from "./watchers/firestoreWatcher";
+import path from "path";
+import dotenv from "dotenv";
+
+dotenv.config({ path: path.resolve(__dirname, "../../../../../.env") });
+
+export interface Web3AdapterConfig {
+ registryUrl: string;
+ webhookSecret: string;
+ webhookEndpoint: string;
+ pictiqueWebhookUrl: string;
+ pictiqueWebhookSecret: string;
+}
+
+export class Web3Adapter {
+ private readonly db = getFirestore();
+ private watchers: Map = new Map();
+
+ async initialize(): Promise {
+ console.log("Initializing Web3Adapter...");
+
+ // Initialize watchers for each collection
+ const collections = [
+ { name: "users", type: "user" },
+ { name: "tweets", type: "socialMediaPost" },
+ { name: "chats", type: "message" },
+ { name: "comments", type: "comment" },
+ ];
+
+ for (const { name, type } of collections) {
+ try {
+ console.log(`Setting up watcher for collection: ${name}`);
+ const collection = this.db.collection(name);
+ const watcher = new FirestoreWatcher(collection);
+ await watcher.start();
+ this.watchers.set(name, watcher);
+ console.log(`Successfully set up watcher for ${name}`);
+
+ // Special handling for messages using collection group
+ if (name === "chats") {
+ const messagesWatcher = new FirestoreWatcher(
+ this.db.collectionGroup("messages")
+ );
+ await messagesWatcher.start();
+ this.watchers.set("messages", messagesWatcher);
+ console.log("Successfully set up watcher for all messages");
+ }
+ } catch (error) {
+ console.error(`Failed to set up watcher for ${name}:`, error);
+ }
+ }
+
+ // Set up error handling for watchers
+ process.on("unhandledRejection", (error) => {
+ console.error("Unhandled promise rejection in watchers:", error);
+ // Attempt to restart watchers
+ this.restartWatchers();
+ });
+ }
+
+ private async restartWatchers(): Promise {
+ console.log("Attempting to restart watchers...");
+
+ // Stop all existing watchers
+ await this.shutdown();
+
+ // Wait a bit before restarting
+ await new Promise((resolve) => setTimeout(resolve, 1000));
+
+ // Reinitialize watchers
+ await this.initialize();
+ }
+
+ async shutdown(): Promise {
+ console.log("Shutting down Web3Adapter...");
+
+ // Stop all watchers
+ const stopPromises = Array.from(this.watchers.values()).map(
+ async (watcher) => {
+ try {
+ await watcher.stop();
+ } catch (error) {
+ console.error("Error stopping watcher:", error);
+ }
+ }
+ );
+
+ await Promise.all(stopPromises);
+ this.watchers.clear();
+ console.log("All watchers stopped");
+ }
+
+ // Method to manually trigger a watcher restart
+ async restartWatcher(collectionName: string): Promise {
+ const watcher = this.watchers.get(collectionName);
+ if (watcher) {
+ console.log(`Restarting watcher for ${collectionName}...`);
+ await watcher.stop();
+ const collection = this.db.collection(collectionName);
+ const newWatcher = new FirestoreWatcher(collection);
+ await newWatcher.start();
+ this.watchers.set(collectionName, newWatcher);
+ console.log(`Successfully restarted watcher for ${collectionName}`);
+ }
+ }
+}
diff --git a/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/chat.mapping.json b/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/chat.mapping.json
new file mode 100644
index 00000000..505aedcc
--- /dev/null
+++ b/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/chat.mapping.json
@@ -0,0 +1,15 @@
+{
+ "tableName": "chat",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440003",
+ "participants": "user(participants[])",
+ "ownerEnamePath": "users(participants[])",
+ "localToUniversalMap": {
+ "name": "name",
+ "type": "type",
+ "participants": "user(participants[]),participantIds",
+ "lastMessage": "lastMessageId",
+ "createdAt": "__date(__calc(createdAt._seconds * 1000)),createdAt",
+ "updatedAt": "__date(__calc(updatedAt._seconds * 1000)),updatedAt",
+ "isArchived": "isArchived"
+ }
+}
diff --git a/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/message.mapping.json b/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/message.mapping.json
new file mode 100644
index 00000000..3d683896
--- /dev/null
+++ b/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/message.mapping.json
@@ -0,0 +1,15 @@
+{
+ "tableName": "message",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440004",
+ "ownerEnamePath": "user(senderId)",
+ "localToUniversalMap": {
+ "chatId": "chat(chatId),chatId",
+ "senderId": "user(senderId),senderId",
+ "text": "content",
+ "type": "type",
+ "mediaUrl": "mediaUrl",
+ "createdAt": "__date(calc(createdAt._seconds * 1000)),createdAt",
+ "updatedAt": "__date(calc(updatedAt._seconds * 1000)),updatedAt",
+ "isArchived": "isArchived"
+ }
+}
diff --git a/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/social-media-post.mapping.json b/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/social-media-post.mapping.json
new file mode 100644
index 00000000..4b0b9fe0
--- /dev/null
+++ b/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/social-media-post.mapping.json
@@ -0,0 +1,14 @@
+{
+ "tableName": "tweet",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440001",
+ "ownerEnamePath": "user(createdBy)",
+ "localToUniversalMap": {
+ "text": "content",
+ "createdBy": "user(createdBy),authorId",
+ "images": "images[].src,mediaUrls",
+ "parent": "tweet(parent.id),parentPostId",
+ "userLikes": "user(userLikes)[],likedBy",
+ "createdAt": "__date(calc(createdAt._seconds * 1000)),createdAt",
+ "updatedAt": "__date(calc(updatedAt._seconds * 1000)),updatedAt"
+ }
+}
diff --git a/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/user.mapping.json b/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/user.mapping.json
new file mode 100644
index 00000000..edbe0f60
--- /dev/null
+++ b/platforms/blabsy-w3ds-auth-api/src/web3adapter/mappings/user.mapping.json
@@ -0,0 +1,18 @@
+{
+ "tableName": "user",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440000",
+ "ownerEnamePath": "id",
+ "localToUniversalMap": {
+ "bio": "bio",
+ "username": "username",
+ "name": "displayName",
+ "photoURL": "avatarUrl",
+ "ename": "ename",
+ "coverPhotoURL": "bannerUrl",
+ "website": "website",
+ "location": "location",
+ "verified": "isVerified",
+ "createdAt": "__date(calc(createdAt._seconds * 1000)),createdAt",
+ "updatedAt": "__date(calc(updatedAt._seconds * 1000)),updatedAt"
+ }
+}
diff --git a/platforms/blabsy-w3ds-auth-api/src/web3adapter/watchers/firestoreWatcher.ts b/platforms/blabsy-w3ds-auth-api/src/web3adapter/watchers/firestoreWatcher.ts
new file mode 100644
index 00000000..1597efbc
--- /dev/null
+++ b/platforms/blabsy-w3ds-auth-api/src/web3adapter/watchers/firestoreWatcher.ts
@@ -0,0 +1,162 @@
+import {
+ DocumentChange,
+ DocumentData,
+ QuerySnapshot,
+ CollectionReference,
+ CollectionGroup,
+} from "firebase-admin/firestore";
+import path from "path";
+import dotenv from "dotenv";
+import { adapter } from "../../controllers/WebhookController";
+dotenv.config({ path: path.resolve(__dirname, "../../../../../.env") });
+
+export class FirestoreWatcher {
+ private unsubscribe: (() => void) | null = null;
+ private adapter = adapter;
+ private isProcessing = false;
+ private retryCount = 0;
+ private readonly maxRetries: number = 3;
+ private readonly retryDelay: number = 1000; // 1 second
+
+ constructor(
+ private readonly collection:
+ | CollectionReference
+ | CollectionGroup
+ ) {}
+
+ async start(): Promise {
+ const collectionPath =
+ this.collection instanceof CollectionReference
+ ? this.collection.path
+ : "collection group";
+
+ try {
+ // First, get all existing documents
+ const snapshot = await this.collection.get();
+ await this.processSnapshot(snapshot);
+
+ // Then set up real-time listener
+ this.unsubscribe = this.collection.onSnapshot(
+ async (snapshot) => {
+ if (this.isProcessing) {
+ console.log(
+ "Still processing previous snapshot, skipping..."
+ );
+ return;
+ }
+
+ try {
+ this.isProcessing = true;
+ await this.processSnapshot(snapshot);
+ this.retryCount = 0; // Reset retry count on success
+ } catch (error) {
+ console.error("Error processing snapshot:", error);
+ await this.handleError(error);
+ } finally {
+ this.isProcessing = false;
+ }
+ },
+ (error) => {
+ console.error("Error in Firestore listener:", error);
+ this.handleError(error);
+ }
+ );
+
+ console.log(`Successfully started watcher for ${collectionPath}`);
+ } catch (error) {
+ console.error(
+ `Failed to start watcher for ${collectionPath}:`,
+ error
+ );
+ throw error;
+ }
+ }
+
+ async stop(): Promise {
+ const collectionPath =
+ this.collection instanceof CollectionReference
+ ? this.collection.path
+ : "collection group";
+ console.log(`Stopping watcher for collection: ${collectionPath}`);
+
+ if (this.unsubscribe) {
+ this.unsubscribe();
+ this.unsubscribe = null;
+ console.log(`Successfully stopped watcher for ${collectionPath}`);
+ }
+ }
+
+ private async handleError(error: any): Promise {
+ if (this.retryCount < this.maxRetries) {
+ this.retryCount++;
+ console.log(`Retrying (${this.retryCount}/${this.maxRetries})...`);
+ await new Promise((resolve) =>
+ setTimeout(resolve, this.retryDelay * this.retryCount)
+ );
+ await this.start();
+ } else {
+ console.error("Max retries reached, stopping watcher");
+ await this.stop();
+ }
+ }
+
+ private async processSnapshot(snapshot: QuerySnapshot): Promise {
+ const changes = snapshot.docChanges();
+ const collectionPath =
+ this.collection instanceof CollectionReference
+ ? this.collection.path
+ : "collection group";
+ console.log(
+ `Processing ${changes.length} changes in ${collectionPath}`
+ );
+
+ for (const change of changes) {
+ const doc = change.doc;
+ const data = doc.data();
+
+ try {
+ switch (change.type) {
+ case "added":
+ case "modified":
+ setTimeout(() => {
+ console.log(
+ `${collectionPath} - processing - ${doc.id}`
+ );
+ if (adapter.lockedIds.includes(doc.id)) return;
+ this.handleCreateOrUpdate(doc, data);
+ }, 2_000);
+ break;
+ case "removed":
+ console.log(`Document removed: ${doc.id}`);
+ // Handle document removal if needed
+ break;
+ }
+ } catch (error) {
+ console.error(
+ `Error processing ${change.type} for document ${doc.id}:`,
+ error
+ );
+ // Continue processing other changes even if one fails
+ }
+ }
+ }
+
+ private async handleCreateOrUpdate(
+ doc: FirebaseFirestore.QueryDocumentSnapshot,
+ data: DocumentData
+ ): Promise {
+ const tableParts = doc.ref.path.split("/");
+ // -2 cuz -1 gives last entry and we need second last which would
+ // be the path specifier
+ const tableNameRaw = tableParts[tableParts.length - 2];
+
+ const tableName = tableNameRaw.slice(0, tableNameRaw.length - 1);
+
+ await this.adapter
+ .handleChange({
+ data: { ...data, id: doc.id },
+ tableName,
+ })
+ .catch((e) => console.error(e));
+ }
+}
diff --git a/platforms/blabsy/.env.development b/platforms/blabsy/.env.development
index 9aaca90b..b68f47e9 100644
--- a/platforms/blabsy/.env.development
+++ b/platforms/blabsy/.env.development
@@ -1,6 +1,9 @@
# Dev URL
-NEXT_PUBLIC_URL=http://localhost
-NEXT_PUBLIC_BASE_URL=http://192.168.0.226:4444
+# NEXT_PUBLIC_URL=http://localhost
+NEXT_PUBLIC_BASE_URL=http://192.168.0.231:4444
+
+NEXT_PUBLIC_URL=https://blabsy.w3ds-prototype.merul.org
+# NEXT_PUBLIC_BASE_URL=https://blabsy.w3ds-prototype.merul.org
# Emulator
NEXT_PUBLIC_USE_EMULATOR=false
diff --git a/platforms/blabsy/AUTHENTICATION_SECURITY.md b/platforms/blabsy/AUTHENTICATION_SECURITY.md
new file mode 100644
index 00000000..60833379
--- /dev/null
+++ b/platforms/blabsy/AUTHENTICATION_SECURITY.md
@@ -0,0 +1,46 @@
+# Authentication Security Changes
+
+## Overview
+This document outlines the security changes made to prevent automatic user creation when users sign in but don't exist in the database.
+
+## Problem
+Previously, when a user signed in with a custom token or Google authentication, if they didn't exist in the database, the system would automatically create a new user record. This was a security vulnerability as it allowed unauthorized users to create accounts.
+
+## Solution
+The following changes were implemented to prevent automatic user creation:
+
+### 1. Frontend Authentication Context (`src/lib/context/auth-context.tsx`)
+- **Modified `manageUser` function**: Removed automatic user creation logic
+- **Added error handling**: When a user doesn't exist in the database, an error is set and the user is signed out
+- **Cleaned up imports**: Removed unused imports related to user creation
+
+### 2. Authentication Layout (`src/components/layout/auth-layout.tsx`)
+- **Added error display**: Shows authentication errors to users with a clear message
+- **Added retry functionality**: Users can retry authentication if needed
+- **Improved UX**: Clear messaging about contacting support for account registration
+
+### 3. Firestore Security Rules (`firestore.rules`)
+- **Restricted user creation**: Only admin users can create new user documents
+- **Maintained read access**: Authenticated users can still read user data
+- **Controlled updates**: Users can only update their own data or admin can update any user
+
+## User Experience
+When a user tries to sign in but doesn't exist in the database:
+
+1. They will see an error message: "User not found in database. Please contact support to register your account."
+2. They will be automatically signed out
+3. They can retry authentication or contact support
+4. No new user record is created
+
+## Admin User Creation
+Only users with admin privileges (username: 'ccrsxx') can create new user records. This ensures that user creation is controlled and audited.
+
+## Backend Considerations
+The webhook controller in `blabsy-w3ds-auth-api` can still create users through webhooks, but this is controlled by the external system and not by user authentication attempts.
+
+## Testing
+To test this fix:
+1. Try to sign in with a custom token for a user that doesn't exist in the database
+2. Verify that no new user record is created
+3. Verify that an appropriate error message is displayed
+4. Verify that the user is signed out automatically
\ No newline at end of file
diff --git a/platforms/blabsy/firestore.rules b/platforms/blabsy/firestore.rules
index ebc09b8c..8dfe4997 100644
--- a/platforms/blabsy/firestore.rules
+++ b/platforms/blabsy/firestore.rules
@@ -32,7 +32,10 @@ service cloud.firestore {
}
match /users/{document=**} {
- allow read, write: if request.auth != null;
+ allow read: if request.auth != null;
+ allow create: if isAdmin();
+ allow update: if request.auth != null && (request.auth.uid == resource.data.id || isAdmin());
+ allow delete: if isAdmin();
}
match /chats/{chatId} {
diff --git a/platforms/blabsy/package.json b/platforms/blabsy/package.json
index 5d37e2b7..34971e86 100644
--- a/platforms/blabsy/package.json
+++ b/platforms/blabsy/package.json
@@ -4,7 +4,7 @@
"private": true,
"scripts": {
"emulators": "firebase emulators:start --only firestore,auth,storage,functions",
- "dev": "next dev -p 80",
+ "dev": "next dev -p 8080",
"dev:emulators": "concurrently npm:dev npm:emulators",
"build": "next build",
"start": "next start",
diff --git a/platforms/blabsy/src/components/aside/aside.tsx b/platforms/blabsy/src/components/aside/aside.tsx
index 9517241b..07587dda 100644
--- a/platforms/blabsy/src/components/aside/aside.tsx
+++ b/platforms/blabsy/src/components/aside/aside.tsx
@@ -8,9 +8,9 @@ type AsideProps = {
};
export function Aside({ children }: AsideProps): JSX.Element | null {
- const { width } = useWindow();
+ const { windowSize } = useWindow();
- if (width < 1024) return null;
+ if (windowSize.width < 1024) return null;
return (
- >
- )}
-
- {userData && (
- <>
-
- {children}
+
+ {userData && (
+ <>
+
+ {children}
+ >
+ )}
>
- )}
- >
- );
+ );
}
diff --git a/platforms/blabsy/src/components/login/login-main.tsx b/platforms/blabsy/src/components/login/login-main.tsx
index 7f6eec02..be3288c1 100644
--- a/platforms/blabsy/src/components/login/login-main.tsx
+++ b/platforms/blabsy/src/components/login/login-main.tsx
@@ -65,7 +65,11 @@ export function LoginMain(): JSX.Element {
Join Blabsy today.
-
{qr && }
+
diff --git a/platforms/blabsy/src/components/modal/mobile-sidebar-modal.tsx b/platforms/blabsy/src/components/modal/mobile-sidebar-modal.tsx
index 34f89550..661831df 100644
--- a/platforms/blabsy/src/components/modal/mobile-sidebar-modal.tsx
+++ b/platforms/blabsy/src/components/modal/mobile-sidebar-modal.tsx
@@ -120,7 +120,7 @@ export function MobileSidebarModal({
{open && (
-
-
-
-
+
+
+
+ {open && (
+
+
+
+
+
+
+
+
+ {({ active }): JSX.Element => (
+
+ )}
+
+
-
-
-
+ >
+
+
+
+ )}
+
+ >
)}
-
- >
- )}
-
- >
+
+ >
);
}
diff --git a/platforms/blabsy/src/components/tweet/tweet.tsx b/platforms/blabsy/src/components/tweet/tweet.tsx
index 7c382a36..6af823cd 100644
--- a/platforms/blabsy/src/components/tweet/tweet.tsx
+++ b/platforms/blabsy/src/components/tweet/tweet.tsx
@@ -201,7 +201,7 @@ export function Tweet(tweet: TweetProps): JSX.Element {
)}
- {images && (
+ {images && images.length > 0 && (
-
-
-
-
- );
+ console.log(typeof src);
+
+ return (
+
+
+
+
+
+ );
}
diff --git a/platforms/blabsy/src/components/view/view-tweet.tsx b/platforms/blabsy/src/components/view/view-tweet.tsx
index 8ff92e35..8663e606 100644
--- a/platforms/blabsy/src/components/view/view-tweet.tsx
+++ b/platforms/blabsy/src/components/view/view-tweet.tsx
@@ -20,140 +20,150 @@ import type { User } from '@lib/types/user';
import type { Tweet } from '@lib/types/tweet';
type ViewTweetProps = Tweet & {
- user: User;
- viewTweetRef?: RefObject;
+ user: User;
+ viewTweetRef?: RefObject;
};
export function ViewTweet(tweet: ViewTweetProps): JSX.Element {
- const {
- id: tweetId,
- text,
- images,
- parent,
- userLikes,
- createdBy,
- createdAt,
- userRetweets,
- userReplies,
- viewTweetRef,
- user: tweetUserData
- } = tweet;
+ const {
+ id: tweetId,
+ text,
+ images,
+ parent,
+ userLikes,
+ createdBy,
+ createdAt,
+ userRetweets,
+ userReplies,
+ viewTweetRef,
+ user: tweetUserData
+ } = tweet;
- const { id: ownerId, name, username, verified, photoURL } = tweetUserData;
+ const { id: ownerId, name, username, verified, photoURL } = tweetUserData;
- const { user } = useAuth();
+ const { user } = useAuth();
- const { open, openModal, closeModal } = useModal();
+ const { open, openModal, closeModal } = useModal();
- const tweetLink = `/tweet/${tweetId}`;
+ const tweetLink = `/tweet/${tweetId}`;
- const userId = user?.id as string;
+ const userId = user?.id as string;
- const isOwner = userId === createdBy;
+ const isOwner = userId === createdBy;
- const reply = !!parent;
+ const reply = !!parent;
- const { id: parentId, username: parentUsername = username } = parent ?? {};
+ const { id: parentId, username: parentUsername = username } = parent ?? {};
- return (
-
-
-
-
-
- {reply && (
-
-
-
- )}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ reply && 'scroll-m-[3.25rem] pt-0'
+ )}
+ {...variants}
+ animate={{ ...variants.animate, transition: { duration: 0.2 } }}
+ exit={undefined}
+ ref={viewTweetRef}
+ >
+
+
+
+
+ {reply && (
+
+
+
+ )}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 0}
+ createdBy={createdBy}
+ />
+
+
+
-
-
-
- {reply && (
-
- Replying to{' '}
-
-
- @{parentUsername}
-
-
-
- )}
-
- {text && (
-
{text}
- )}
- {images && (
-
- )}
-
+ Replying to{' '}
+
+
+ @{parentUsername}
+
+
+
+ )}
+
+ {text && (
+
+ {text}
+
+ )}
+ {images && images.length > 0 && (
+
+ )}
+
-
-
-
-
-
-
- );
+ >
+
+
+
+
+
+
+ );
}
diff --git a/platforms/blabsy/src/lib/context/auth-context.tsx b/platforms/blabsy/src/lib/context/auth-context.tsx
index 194711da..75eabeae 100644
--- a/platforms/blabsy/src/lib/context/auth-context.tsx
+++ b/platforms/blabsy/src/lib/context/auth-context.tsx
@@ -9,24 +9,18 @@ import {
import {
doc,
getDoc,
- setDoc,
- onSnapshot,
- serverTimestamp
+ onSnapshot
} from 'firebase/firestore';
import { auth } from '@lib/firebase/app';
import {
usersCollection,
- userStatsCollection,
userBookmarksCollection
} from '@lib/firebase/collections';
-import { getRandomId, getRandomInt } from '@lib/random';
-import { checkUsernameAvailability } from '@lib/firebase/utils';
+import { getRandomId } from '@lib/random';
import type { ReactNode } from 'react';
import type { User as AuthUser } from 'firebase/auth';
-import type { WithFieldValue } from 'firebase/firestore';
import type { User } from '@lib/types/user';
import type { Bookmark } from '@lib/types/bookmark';
-import type { Stats } from '@lib/types/stats';
type AuthContext = {
user: User | null;
@@ -57,71 +51,18 @@ export function AuthContextProvider({
useEffect(() => {
const manageUser = async (authUser: AuthUser): Promise
=> {
const { uid, displayName, photoURL } = authUser;
+ console.log(uid);
const userSnapshot = await getDoc(doc(usersCollection, uid));
if (!userSnapshot.exists()) {
- let available = false;
- let randomUsername = '';
-
- while (!available) {
- const normalizeName = displayName
- ?.replace(/\s/g, '')
- .toLowerCase();
- const randomInt = getRandomInt(1, 10_000);
-
- randomUsername = `${normalizeName as string}${randomInt}`;
-
- const isUsernameAvailable = await checkUsernameAvailability(
- randomUsername
- );
-
- if (isUsernameAvailable) available = true;
- }
-
- const userData: WithFieldValue = {
- id: uid,
- bio: null,
- name: displayName as string,
- theme: null,
- accent: null,
- website: null,
- location: null,
- photoURL: photoURL ?? '/assets/twitter-avatar.jpg',
- username: randomUsername,
- verified: false,
- following: [],
- followers: [],
- createdAt: serverTimestamp(),
- updatedAt: null,
- totalTweets: 0,
- totalPhotos: 0,
- pinnedTweet: null,
- coverPhotoURL: null
- };
-
- const userStatsData: WithFieldValue = {
- likes: [],
- tweets: [],
- updatedAt: null
- };
-
- try {
- await Promise.all([
- setDoc(doc(usersCollection, uid), userData),
- setDoc(
- doc(userStatsCollection(uid), 'stats'),
- userStatsData
- )
- ]);
-
- const newUser = (
- await getDoc(doc(usersCollection, uid))
- ).data();
- setUser(newUser as User);
- } catch (error) {
- setError(error as Error);
- }
+ // User doesn't exist in database - don't create automatically
+ console.error(`User ${uid} not found in database. User must be pre-registered.`);
+ setError(new Error('User not found in database. Please contact support to register your account.'));
+ setLoading(false);
+ // Sign out the user since they shouldn't be authenticated
+ await signOutFirebase(auth);
+ return;
} else {
const userData = userSnapshot.data();
setUser(userData);
diff --git a/platforms/blabsy/src/lib/context/chat-context.tsx b/platforms/blabsy/src/lib/context/chat-context.tsx
index 727ccc8d..b31e5c57 100644
--- a/platforms/blabsy/src/lib/context/chat-context.tsx
+++ b/platforms/blabsy/src/lib/context/chat-context.tsx
@@ -71,10 +71,7 @@ export function ChatContextProvider({
const unsubscribe = onSnapshot(
chatsQuery,
(snapshot) => {
- const chatsData = snapshot.docs.map((doc) => ({
- id: doc.id,
- ...doc.data()
- }));
+ const chatsData = snapshot.docs.map((doc) => doc.data());
setChats(chatsData);
setLoading(false);
},
diff --git a/platforms/blabsy/src/lib/firebase/utils.ts b/platforms/blabsy/src/lib/firebase/utils.ts
index 7973b8ff..d6dc301b 100644
--- a/platforms/blabsy/src/lib/firebase/utils.ts
+++ b/platforms/blabsy/src/lib/firebase/utils.ts
@@ -1,451 +1,485 @@
import {
- doc,
- query,
- where,
- limit,
- setDoc,
- getDocs,
- updateDoc,
- deleteDoc,
- increment,
- writeBatch,
- arrayUnion,
- arrayRemove,
- serverTimestamp,
- getCountFromServer,
- getDoc
+ doc,
+ query,
+ where,
+ limit,
+ setDoc,
+ getDocs,
+ updateDoc,
+ deleteDoc,
+ increment,
+ writeBatch,
+ arrayUnion,
+ arrayRemove,
+ serverTimestamp,
+ getCountFromServer,
+ getDoc
} from 'firebase/firestore';
import { ref, uploadBytesResumable, getDownloadURL } from 'firebase/storage';
import { db, storage } from './app';
import {
- usersCollection,
- tweetsCollection,
- userStatsCollection,
- userBookmarksCollection,
- chatsCollection,
- chatMessagesCollection
+ usersCollection,
+ tweetsCollection,
+ userStatsCollection,
+ userBookmarksCollection,
+ chatsCollection,
+ chatMessagesCollection
} from './collections';
import type { WithFieldValue, Query } from 'firebase/firestore';
import type { EditableUserData } from '@lib/types/user';
import type { FilesWithId, ImagesPreview } from '@lib/types/file';
import type { Bookmark } from '@lib/types/bookmark';
import type { Theme, Accent } from '@lib/types/theme';
-import type { Chat, Message } from '@lib/types/chat';
+import type { Chat } from '@lib/types/chat';
+import type { Message } from '@lib/types/message';
+import type { Stats } from '@lib/types/stats';
export async function checkUsernameAvailability(
- username: string
+ username: string
): Promise {
- const { empty } = await getDocs(
- query(usersCollection, where('username', '==', username), limit(1))
- );
- return empty;
+ const { empty } = await getDocs(
+ query(usersCollection, where('username', '==', username), limit(1))
+ );
+ return empty;
}
export async function getCollectionCount(
- collection: Query
+ collection: Query
): Promise {
- const snapshot = await getCountFromServer(collection);
- return snapshot.data().count;
+ const snapshot = await getCountFromServer(collection);
+ return snapshot.data().count;
}
export async function updateUserData(
- userId: string,
- userData: EditableUserData
+ userId: string,
+ userData: EditableUserData
): Promise {
- const userRef = doc(usersCollection, userId);
- await updateDoc(userRef, {
- ...userData,
- updatedAt: serverTimestamp()
- });
+ const userRef = doc(usersCollection, userId);
+ await updateDoc(userRef, {
+ ...userData,
+ updatedAt: serverTimestamp()
+ });
}
export async function updateUserTheme(
- userId: string,
- themeData: { theme?: Theme; accent?: Accent }
+ userId: string,
+ themeData: { theme?: Theme; accent?: Accent }
): Promise {
- const userRef = doc(usersCollection, userId);
- await updateDoc(userRef, { ...themeData });
+ const userRef = doc(usersCollection, userId);
+ await updateDoc(userRef, { ...themeData });
}
export async function updateUsername(
- userId: string,
- username?: string
+ userId: string,
+ username?: string
): Promise {
- const userRef = doc(usersCollection, userId);
- await updateDoc(userRef, {
- ...(username && { username }),
- updatedAt: serverTimestamp()
- });
+ const userRef = doc(usersCollection, userId);
+ await updateDoc(userRef, {
+ ...(username && { username }),
+ updatedAt: serverTimestamp()
+ });
}
export async function managePinnedTweet(
- type: 'pin' | 'unpin',
- userId: string,
- tweetId: string
+ type: 'pin' | 'unpin',
+ userId: string,
+ tweetId: string
): Promise {
- const userRef = doc(usersCollection, userId);
- await updateDoc(userRef, {
- updatedAt: serverTimestamp(),
- pinnedTweet: type === 'pin' ? tweetId : null
- });
+ const userRef = doc(usersCollection, userId);
+ await updateDoc(userRef, {
+ updatedAt: serverTimestamp(),
+ pinnedTweet: type === 'pin' ? tweetId : null
+ });
}
export async function manageFollow(
- type: 'follow' | 'unfollow',
- userId: string,
- targetUserId: string
+ type: 'follow' | 'unfollow',
+ userId: string,
+ targetUserId: string
): Promise {
- const batch = writeBatch(db);
-
- const userDocRef = doc(usersCollection, userId);
- const targetUserDocRef = doc(usersCollection, targetUserId);
+ const batch = writeBatch(db);
- if (type === 'follow') {
- batch.update(userDocRef, {
- following: arrayUnion(targetUserId),
- updatedAt: serverTimestamp()
- });
- batch.update(targetUserDocRef, {
- followers: arrayUnion(userId),
- updatedAt: serverTimestamp()
- });
- } else {
- batch.update(userDocRef, {
- following: arrayRemove(targetUserId),
- updatedAt: serverTimestamp()
- });
- batch.update(targetUserDocRef, {
- followers: arrayRemove(userId),
- updatedAt: serverTimestamp()
- });
- }
+ const userDocRef = doc(usersCollection, userId);
+ const targetUserDocRef = doc(usersCollection, targetUserId);
+
+ if (type === 'follow') {
+ batch.update(userDocRef, {
+ following: arrayUnion(targetUserId),
+ updatedAt: serverTimestamp()
+ });
+ batch.update(targetUserDocRef, {
+ followers: arrayUnion(userId),
+ updatedAt: serverTimestamp()
+ });
+ } else {
+ batch.update(userDocRef, {
+ following: arrayRemove(targetUserId),
+ updatedAt: serverTimestamp()
+ });
+ batch.update(targetUserDocRef, {
+ followers: arrayRemove(userId),
+ updatedAt: serverTimestamp()
+ });
+ }
- await batch.commit();
+ await batch.commit();
}
export async function removeTweet(tweetId: string): Promise {
- const userRef = doc(tweetsCollection, tweetId);
- await deleteDoc(userRef);
+ const userRef = doc(tweetsCollection, tweetId);
+ await deleteDoc(userRef);
}
export async function uploadImages(
- userId: string,
- files: FilesWithId
+ userId: string,
+ files: FilesWithId
): Promise {
- if (!files.length) return null;
+ if (!files.length) return null;
- const imagesPreview = await Promise.all(
- files.map(async (file) => {
- const { id, name: alt, type } = file;
+ const imagesPreview = await Promise.all(
+ files.map(async (file) => {
+ const { id, name: alt, type } = file;
- const storageRef = ref(storage, `images/${userId}/${id}`);
+ const storageRef = ref(storage, `images/${userId}/${id}`);
- await uploadBytesResumable(storageRef, file);
+ await uploadBytesResumable(storageRef, file);
- const src = await getDownloadURL(storageRef);
+ const src = await getDownloadURL(storageRef);
- return { id, src, alt, type };
- })
- );
+ return { id, src, alt, type };
+ })
+ );
- return imagesPreview;
+ return imagesPreview;
}
export async function manageReply(
- type: 'increment' | 'decrement',
- tweetId: string
+ type: 'increment' | 'decrement',
+ tweetId: string
): Promise {
- const tweetRef = doc(tweetsCollection, tweetId);
+ const tweetRef = doc(tweetsCollection, tweetId);
- try {
- await updateDoc(tweetRef, {
- userReplies: increment(type === 'increment' ? 1 : -1),
- updatedAt: serverTimestamp()
- });
- } catch {
- // do nothing, because parent tweet was already deleted
- }
+ try {
+ await updateDoc(tweetRef, {
+ userReplies: increment(type === 'increment' ? 1 : -1),
+ updatedAt: serverTimestamp()
+ });
+ } catch {
+ // do nothing, because parent tweet was already deleted
+ }
}
export async function manageTotalTweets(
- type: 'increment' | 'decrement',
- userId: string
+ type: 'increment' | 'decrement',
+ userId: string
): Promise {
- const userRef = doc(usersCollection, userId);
- await updateDoc(userRef, {
- totalTweets: increment(type === 'increment' ? 1 : -1),
- updatedAt: serverTimestamp()
- });
+ const userRef = doc(usersCollection, userId);
+ await updateDoc(userRef, {
+ totalTweets: increment(type === 'increment' ? 1 : -1),
+ updatedAt: serverTimestamp()
+ });
}
export async function manageTotalPhotos(
- type: 'increment' | 'decrement',
- userId: string
+ type: 'increment' | 'decrement',
+ userId: string
): Promise {
- const userRef = doc(usersCollection, userId);
- await updateDoc(userRef, {
- totalPhotos: increment(type === 'increment' ? 1 : -1),
- updatedAt: serverTimestamp()
- });
+ const userRef = doc(usersCollection, userId);
+ await updateDoc(userRef, {
+ totalPhotos: increment(type === 'increment' ? 1 : -1),
+ updatedAt: serverTimestamp()
+ });
}
-export function manageRetweet(
- type: 'retweet' | 'unretweet',
- userId: string,
- tweetId: string
-) {
- return async (): Promise => {
- const batch = writeBatch(db);
-
- const tweetRef = doc(tweetsCollection, tweetId);
+export async function ensureUserStatsExists(userId: string): Promise {
const userStatsRef = doc(userStatsCollection(userId), 'stats');
-
- if (type === 'retweet') {
- batch.update(tweetRef, {
- userRetweets: arrayUnion(userId),
- updatedAt: serverTimestamp()
- });
- batch.update(userStatsRef, {
- tweets: arrayUnion(tweetId),
- updatedAt: serverTimestamp()
- });
- } else {
- batch.update(tweetRef, {
- userRetweets: arrayRemove(userId),
- updatedAt: serverTimestamp()
- });
- batch.update(userStatsRef, {
- tweets: arrayRemove(tweetId),
- updatedAt: serverTimestamp()
- });
+
+ // Check if the stats document exists
+ const statsDoc = await getDoc(userStatsRef);
+
+ if (!statsDoc.exists()) {
+ // Create the stats document with default values
+ const defaultStatsData: WithFieldValue = {
+ likes: [],
+ tweets: [],
+ updatedAt: serverTimestamp()
+ };
+
+ await setDoc(userStatsRef, defaultStatsData);
+ console.log(`Created stats document for user ${userId}`);
}
+}
- await batch.commit();
- };
+export function manageRetweet(
+ type: 'retweet' | 'unretweet',
+ userId: string,
+ tweetId: string
+) {
+ return async (): Promise => {
+ const batch = writeBatch(db);
+
+ const tweetRef = doc(tweetsCollection, tweetId);
+ const userStatsRef = doc(userStatsCollection(userId), 'stats');
+
+ // Ensure stats document exists before updating
+ await ensureUserStatsExists(userId);
+
+ if (type === 'retweet') {
+ batch.update(tweetRef, {
+ userRetweets: arrayUnion(userId),
+ updatedAt: serverTimestamp()
+ });
+ batch.set(userStatsRef, {
+ tweets: arrayUnion(tweetId),
+ updatedAt: serverTimestamp()
+ }, { merge: true });
+ } else {
+ batch.update(tweetRef, {
+ userRetweets: arrayRemove(userId),
+ updatedAt: serverTimestamp()
+ });
+ batch.set(userStatsRef, {
+ tweets: arrayRemove(tweetId),
+ updatedAt: serverTimestamp()
+ }, { merge: true });
+ }
+
+ await batch.commit();
+ };
}
export function manageLike(
- type: 'like' | 'unlike',
- userId: string,
- tweetId: string
+ type: 'like' | 'unlike',
+ userId: string,
+ tweetId: string
) {
- return async (): Promise => {
- const batch = writeBatch(db);
-
- const userStatsRef = doc(userStatsCollection(userId), 'stats');
- const tweetRef = doc(tweetsCollection, tweetId);
-
- if (type === 'like') {
- batch.update(tweetRef, {
- userLikes: arrayUnion(userId),
- updatedAt: serverTimestamp()
- });
- batch.update(userStatsRef, {
- likes: arrayUnion(tweetId),
- updatedAt: serverTimestamp()
- });
- } else {
- batch.update(tweetRef, {
- userLikes: arrayRemove(userId),
- updatedAt: serverTimestamp()
- });
- batch.update(userStatsRef, {
- likes: arrayRemove(tweetId),
- updatedAt: serverTimestamp()
- });
- }
-
- await batch.commit();
- };
+ return async (): Promise => {
+ const batch = writeBatch(db);
+
+ const userStatsRef = doc(userStatsCollection(userId), 'stats');
+ const tweetRef = doc(tweetsCollection, tweetId);
+
+ // Ensure stats document exists before updating
+ await ensureUserStatsExists(userId);
+
+ if (type === 'like') {
+ batch.update(tweetRef, {
+ userLikes: arrayUnion(userId),
+ updatedAt: serverTimestamp()
+ });
+ batch.set(userStatsRef, {
+ likes: arrayUnion(tweetId),
+ updatedAt: serverTimestamp()
+ }, { merge: true });
+ } else {
+ batch.update(tweetRef, {
+ userLikes: arrayRemove(userId),
+ updatedAt: serverTimestamp()
+ });
+ batch.set(userStatsRef, {
+ likes: arrayRemove(tweetId),
+ updatedAt: serverTimestamp()
+ }, { merge: true });
+ }
+
+ await batch.commit();
+ };
}
export async function manageBookmark(
- type: 'bookmark' | 'unbookmark',
- userId: string,
- tweetId: string
+ type: 'bookmark' | 'unbookmark',
+ userId: string,
+ tweetId: string
): Promise {
- const bookmarkRef = doc(userBookmarksCollection(userId), tweetId);
-
- if (type === 'bookmark') {
- const bookmarkData: WithFieldValue = {
- id: tweetId,
- createdAt: serverTimestamp()
- };
- await setDoc(bookmarkRef, bookmarkData);
- } else await deleteDoc(bookmarkRef);
+ const bookmarkRef = doc(userBookmarksCollection(userId), tweetId);
+
+ if (type === 'bookmark') {
+ const bookmarkData: WithFieldValue = {
+ id: tweetId,
+ createdAt: serverTimestamp()
+ };
+ await setDoc(bookmarkRef, bookmarkData);
+ } else await deleteDoc(bookmarkRef);
}
export async function clearAllBookmarks(userId: string): Promise {
- const bookmarksRef = userBookmarksCollection(userId);
- const bookmarksSnapshot = await getDocs(bookmarksRef);
+ const bookmarksRef = userBookmarksCollection(userId);
+ const bookmarksSnapshot = await getDocs(bookmarksRef);
- const batch = writeBatch(db);
+ const batch = writeBatch(db);
- bookmarksSnapshot.forEach(({ ref }) => batch.delete(ref));
+ bookmarksSnapshot.forEach(({ ref }) => batch.delete(ref));
- await batch.commit();
+ await batch.commit();
}
export async function createChat(
- type: 'direct' | 'group',
- participants: string[],
- name?: string
+ type: 'direct' | 'group',
+ participants: string[],
+ name?: string
): Promise {
- const chatRef = doc(chatsCollection);
- const chatData: WithFieldValue = {
- id: chatRef.id,
- type,
- participants,
- name,
- createdAt: serverTimestamp(),
- updatedAt: serverTimestamp()
- };
-
- await setDoc(chatRef, chatData);
- return chatRef.id;
+ const chatRef = doc(chatsCollection);
+ const chatData: WithFieldValue = {
+ id: chatRef.id,
+ type,
+ participants,
+ name,
+ createdAt: serverTimestamp(),
+ updatedAt: serverTimestamp()
+ };
+
+ await setDoc(chatRef, chatData);
+ return chatRef.id;
}
export async function sendMessage(
- chatId: string,
- senderId: string,
- text: string
+ chatId: string,
+ senderId: string,
+ text: string
): Promise {
- const batch = writeBatch(db);
-
- const messageId = doc(chatsCollection).id; // Generate a new ID
- const messageRef = doc(chatMessagesCollection(chatId), messageId);
-
- console.log('error4', chatsCollection, chatId)
- const chatRef = doc(chatsCollection, chatId);
-
- const messageData: WithFieldValue = {
- id: messageId,
- chatId,
- senderId,
- text,
- createdAt: serverTimestamp(),
- updatedAt: serverTimestamp(),
- readBy: [senderId]
- };
-
- batch.set(messageRef, messageData);
- batch.update(chatRef, {
- lastMessage: {
- text,
- senderId,
- timestamp: serverTimestamp()
- },
- updatedAt: serverTimestamp()
- });
-
- await batch.commit();
+ const batch = writeBatch(db);
+
+ const messageId = doc(chatsCollection).id; // Generate a new ID
+ const messageRef = doc(chatMessagesCollection(chatId), messageId);
+
+ console.log('error4', chatsCollection, chatId);
+ const chatRef = doc(chatsCollection, chatId);
+
+ const messageData: WithFieldValue = {
+ id: messageId,
+ chatId,
+ senderId,
+ text,
+ createdAt: serverTimestamp(),
+ updatedAt: serverTimestamp(),
+ readBy: [senderId]
+ };
+
+ batch.set(messageRef, messageData);
+ batch.update(chatRef, {
+ lastMessage: {
+ text,
+ senderId,
+ timestamp: serverTimestamp()
+ },
+ updatedAt: serverTimestamp()
+ });
+
+ await batch.commit();
}
export async function markMessageAsRead(
- chatId: string,
- messageId: string,
- userId: string
+ chatId: string,
+ messageId: string,
+ userId: string
): Promise {
- console.log('[markMessageAsRead] Starting with:', { chatId, messageId, userId });
-
- // First check if the user is a participant in the chat
- const chatRef = doc(chatsCollection, chatId);
- console.log('[markMessageAsRead] Chat ref path:', chatRef.path);
-
- const chatDoc = await getDoc(chatRef);
- console.log('[markMessageAsRead] Chat doc exists:', chatDoc.exists());
-
- if (!chatDoc.exists()) {
- console.error('[markMessageAsRead] Chat not found:', chatId);
- throw new Error('Chat not found');
- }
-
- const chatData = chatDoc.data();
- console.log('[markMessageAsRead] Chat data:', {
- participants: chatData.participants,
- userId,
- isParticipant: chatData.participants.includes(userId)
- });
-
- if (!chatData.participants.includes(userId)) {
- console.error('[markMessageAsRead] User not in participants:', { userId, participants: chatData.participants });
- throw new Error('User is not a participant in this chat');
- }
-
- // Then update the message
- const messageRef = doc(chatMessagesCollection(chatId), messageId);
- console.log('[markMessageAsRead] Message ref path:', messageRef.path);
-
- try {
- await updateDoc(messageRef, {
- readBy: arrayUnion(userId),
- updatedAt: serverTimestamp()
+ console.log('[markMessageAsRead] Starting with:', {
+ chatId,
+ messageId,
+ userId
});
- console.log('[markMessageAsRead] Successfully marked message as read');
- } catch (error) {
- console.error('[markMessageAsRead] Error updating message:', error);
- throw error;
- }
+
+ // First check if the user is a participant in the chat
+ const chatRef = doc(chatsCollection, chatId);
+ console.log('[markMessageAsRead] Chat ref path:', chatRef.path);
+
+ const chatDoc = await getDoc(chatRef);
+ console.log('[markMessageAsRead] Chat doc exists:', chatDoc.exists());
+
+ if (!chatDoc.exists()) {
+ console.error('[markMessageAsRead] Chat not found:', chatId);
+ throw new Error('Chat not found');
+ }
+
+ const chatData = chatDoc.data();
+ console.log('[markMessageAsRead] Chat data:', {
+ participants: chatData.participants,
+ userId,
+ isParticipant: chatData.participants.includes(userId)
+ });
+
+ if (!chatData.participants.includes(userId)) {
+ console.error('[markMessageAsRead] User not in participants:', {
+ userId,
+ participants: chatData.participants
+ });
+ throw new Error('User is not a participant in this chat');
+ }
+
+ // Then update the message
+ const messageRef = doc(chatMessagesCollection(chatId), messageId);
+ console.log('[markMessageAsRead] Message ref path:', messageRef.path);
+
+ try {
+ await updateDoc(messageRef, {
+ readBy: arrayUnion(userId),
+ updatedAt: serverTimestamp()
+ });
+ console.log('[markMessageAsRead] Successfully marked message as read');
+ } catch (error) {
+ console.error('[markMessageAsRead] Error updating message:', error);
+ throw error;
+ }
}
export async function getChatParticipants(chatId: string): Promise {
- const chatDoc = await getDoc(doc(chatsCollection, chatId));
- if (!chatDoc.exists()) throw new Error('Chat not found');
- return chatDoc.data().participants;
+ const chatDoc = await getDoc(doc(chatsCollection, chatId));
+ if (!chatDoc.exists()) throw new Error('Chat not found');
+ return chatDoc.data().participants;
}
export async function addParticipantToChat(
- chatId: string,
- userId: string
+ chatId: string,
+ userId: string
): Promise {
- const chatRef = doc(chatsCollection, chatId);
- await updateDoc(chatRef, {
- participants: arrayUnion(userId),
- updatedAt: serverTimestamp()
- });
+ const chatRef = doc(chatsCollection, chatId);
+ await updateDoc(chatRef, {
+ participants: arrayUnion(userId),
+ updatedAt: serverTimestamp()
+ });
}
export async function removeParticipantFromChat(
- chatId: string,
- userId: string
+ chatId: string,
+ userId: string
): Promise {
- const chatRef = doc(chatsCollection, chatId);
- await updateDoc(chatRef, {
- participants: arrayRemove(userId),
- updatedAt: serverTimestamp()
- });
+ const chatRef = doc(chatsCollection, chatId);
+ await updateDoc(chatRef, {
+ participants: arrayRemove(userId),
+ updatedAt: serverTimestamp()
+ });
}
export async function getOrCreateDirectChat(
- userId: string,
- targetUserId: string
+ userId: string,
+ targetUserId: string
): Promise {
- // Check if a direct chat already exists between these users
- const existingChatsQuery = query(
- chatsCollection,
- where('type', '==', 'direct'),
- where('participants', 'array-contains', userId)
- );
-
- const existingChats = await getDocs(existingChatsQuery);
-
- for (const doc of existingChats.docs) {
- const chat = doc.data();
- if (chat.participants.includes(targetUserId)) {
- return doc.id;
+ // Check if a direct chat already exists between these users
+ const existingChatsQuery = query(
+ chatsCollection,
+ where('type', '==', 'direct'),
+ where('participants', 'array-contains', userId)
+ );
+
+ const existingChats = await getDocs(existingChatsQuery);
+
+ for (const doc of existingChats.docs) {
+ const chat = doc.data();
+ if (chat.participants.includes(targetUserId)) {
+ return doc.id;
+ }
}
- }
-
- // If no existing chat, create a new one
- const newChatRef = doc(chatsCollection);
- const newChat: WithFieldValue = {
- id: newChatRef.id,
- type: 'direct',
- participants: [userId, targetUserId],
- createdAt: serverTimestamp(),
- updatedAt: serverTimestamp()
- };
-
- await setDoc(newChatRef, newChat);
- return newChatRef.id;
+
+ // If no existing chat, create a new one
+ const newChatRef = doc(chatsCollection);
+ const newChat: WithFieldValue = {
+ id: newChatRef.id,
+ type: 'direct',
+ participants: [userId, targetUserId],
+ createdAt: serverTimestamp(),
+ updatedAt: serverTimestamp()
+ };
+
+ await setDoc(newChatRef, newChat);
+ return newChatRef.id;
}
diff --git a/platforms/blabsy/src/lib/types/tweet.ts b/platforms/blabsy/src/lib/types/tweet.ts
index d9c408f4..83799206 100644
--- a/platforms/blabsy/src/lib/types/tweet.ts
+++ b/platforms/blabsy/src/lib/types/tweet.ts
@@ -3,28 +3,28 @@ import type { ImagesPreview } from './file';
import type { User } from './user';
export type Tweet = {
- id: string;
- text: string | null;
- images: ImagesPreview | null;
- parent: { id: string; username: string } | null;
- userLikes: string[];
- createdBy: string;
- createdAt: Timestamp;
- updatedAt: Timestamp | null;
- userReplies: number;
- userRetweets: string[];
+ id: string;
+ text: string | null;
+ images: ImagesPreview | null;
+ parent: { id: string; username: string } | null;
+ userLikes: string[];
+ createdBy: string;
+ createdAt: Timestamp;
+ updatedAt: Timestamp | null;
+ userReplies: number;
+ userRetweets: string[];
};
export type TweetWithUser = Tweet & { user: User };
export const tweetConverter: FirestoreDataConverter = {
- toFirestore(tweet) {
- return { ...tweet };
- },
- fromFirestore(snapshot, options) {
- const { id } = snapshot;
- const data = snapshot.data(options);
+ toFirestore(tweet) {
+ return { ...tweet };
+ },
+ fromFirestore(snapshot, options) {
+ const { id } = snapshot;
+ const data = snapshot.data(options);
- return { id, ...data } as Tweet;
- }
+ return { id, ...data } as Tweet;
+ }
};
diff --git a/platforms/pictique-api/package.json b/platforms/pictique-api/package.json
index 8a1fe477..865b2dde 100644
--- a/platforms/pictique-api/package.json
+++ b/platforms/pictique-api/package.json
@@ -18,10 +18,11 @@
"dotenv": "^16.4.5",
"eventsource-polyfill": "^0.9.6",
"express": "^4.18.2",
+ "graphql-request": "^6.1.0",
"jsonwebtoken": "^9.0.2",
"pg": "^8.11.3",
"reflect-metadata": "^0.2.1",
- "typeorm": "^0.3.20",
+ "typeorm": "^0.3.24",
"uuid": "^9.0.1"
},
"devDependencies": {
diff --git a/platforms/pictique-api/reset.sh b/platforms/pictique-api/reset.sh
new file mode 100755
index 00000000..d61704ed
--- /dev/null
+++ b/platforms/pictique-api/reset.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+set -e # Exit on any error
+
+# Go into psql and run SQL commands as postgres user
+echo "[INFO] Resetting Postgres DB..."
+psql < {
+ console.log("received event to send", data);
res.write(`data: ${JSON.stringify(data)}\n\n`);
};
diff --git a/platforms/pictique-api/src/controllers/PostController.ts b/platforms/pictique-api/src/controllers/PostController.ts
index d33bb565..f754323e 100644
--- a/platforms/pictique-api/src/controllers/PostController.ts
+++ b/platforms/pictique-api/src/controllers/PostController.ts
@@ -22,7 +22,7 @@ export class PostController {
const feed = await this.postService.getFollowingFeed(
userId,
page,
- limit,
+ limit
);
res.json(feed);
} catch (error) {
@@ -63,7 +63,6 @@ export class PostController {
}
const post = await this.postService.toggleLike(postId, userId);
- console.log(post);
res.json(post);
} catch (error) {
console.error("Error toggling like:", error);
diff --git a/platforms/pictique-api/src/controllers/UserController.ts b/platforms/pictique-api/src/controllers/UserController.ts
index f10ef9f0..fc59495a 100644
--- a/platforms/pictique-api/src/controllers/UserController.ts
+++ b/platforms/pictique-api/src/controllers/UserController.ts
@@ -81,10 +81,12 @@ export class UserController {
return res.status(401).json({ error: "Unauthorized" });
}
+ const user = await this.userService.findById(userId);
+
const updatedUser = await this.userService.updateProfile(userId, {
- handle,
- avatarUrl: avatar,
- name
+ handle: handle ?? user?.handle,
+ avatarUrl: avatar ?? user?.avatarUrl,
+ name: name ?? user?.name,
});
res.json(updatedUser);
diff --git a/platforms/pictique-api/src/controllers/WebhookController.ts b/platforms/pictique-api/src/controllers/WebhookController.ts
new file mode 100644
index 00000000..0593eab8
--- /dev/null
+++ b/platforms/pictique-api/src/controllers/WebhookController.ts
@@ -0,0 +1,298 @@
+import { Request, Response } from "express";
+import { UserService } from "../services/UserService";
+import { ChatService } from "../services/ChatService";
+import { PostService } from "../services/PostService";
+import { CommentService } from "../services/CommentService";
+import { Web3Adapter } from "../../../../infrastructure/web3-adapter/src";
+import { User } from "database/entities/User";
+import { Chat } from "database/entities/Chat";
+import { MessageService } from "../services/MessageService";
+import { Post } from "database/entities/Post";
+
+export class WebhookController {
+ userService: UserService;
+ chatService: ChatService;
+ postsService: PostService;
+ commentService: CommentService;
+ adapter: Web3Adapter;
+ messageService: MessageService;
+
+ constructor(adapter: Web3Adapter) {
+ this.userService = new UserService();
+ this.chatService = new ChatService();
+ this.postsService = new PostService();
+ this.commentService = new CommentService();
+ this.adapter = adapter;
+ this.messageService = new MessageService();
+ }
+
+ handleWebhook = async (req: Request, res: Response) => {
+ try {
+ console.log("raw hook", req.body);
+ const schemaId = req.body.schemaId;
+ const globalId = req.body.id;
+ const mapping = Object.values(this.adapter.mapping).find(
+ (m) => m.schemaId === schemaId
+ );
+ this.adapter.addToLockedIds(globalId);
+
+ if (!mapping) throw new Error();
+ const local = await this.adapter.fromGlobal({
+ data: req.body.data,
+ mapping,
+ });
+
+ mapping.tableName =
+ mapping.tableName === "comments" ? "posts" : mapping.tableName;
+ let localId = await this.adapter.mappingDb.getLocalId(globalId);
+
+ if (mapping.tableName === "users") {
+ if (localId) {
+ const user = await this.userService.findById(localId);
+ for (const key of Object.keys(local.data)) {
+ // @ts-ignore
+ user[key] = local.data[key];
+ }
+ if (!user) throw new Error();
+ user.name = req.body.data.displayName;
+ await this.userService.userRepository.save(user);
+ await this.adapter.mappingDb.storeMapping({
+ localId: user.id,
+ globalId: req.body.id,
+ });
+ this.adapter.addToLockedIds(user.id);
+ this.adapter.addToLockedIds(globalId);
+ } else {
+ const { user } = await this.userService.findOrCreateUser(
+ req.body.w3id
+ );
+ for (const key of Object.keys(local.data)) {
+ // @ts-ignore
+ user[key] = local.data[key];
+ }
+ user.name = req.body.data.displayName;
+ await this.userService.userRepository.save(user);
+ await this.adapter.mappingDb.storeMapping({
+ localId: user.id,
+ globalId: req.body.id,
+ });
+ this.adapter.addToLockedIds(user.id);
+ this.adapter.addToLockedIds(globalId);
+ }
+ } else if (mapping.tableName === "posts") {
+ let author: User | null = null;
+ if (local.data.author) {
+ const authorId = local.data.author
+ // @ts-ignore
+ .split("(")[1]
+ .split(")")[0];
+ author = await this.userService.findById(authorId);
+ }
+ let likedBy: User[] = [];
+ if (local.data.likedBy && Array.isArray(local.data.likedBy)) {
+ const likedByPromises = local.data.likedBy.map(
+ async (ref: string) => {
+ if (ref && typeof ref === "string") {
+ const userId = ref.split("(")[1].split(")")[0];
+ return await this.userService.findById(userId);
+ }
+ return null;
+ }
+ );
+ likedBy = (await Promise.all(likedByPromises)).filter(
+ (user): user is User => user !== null
+ );
+ }
+
+ if (local.data.parentPostId) {
+ const parentId = (local.data.parentPostId as string)
+ .split("(")[1]
+ .split(")")[0];
+ const parent = await this.postsService.findById(parentId);
+ if (localId) {
+ const comment =
+ await this.commentService.getCommentById(localId);
+ if (!comment) return;
+ comment.text = local.data.text as string;
+ comment.likedBy = likedBy as User[];
+ comment.author = author as User;
+ comment.post = parent as Post;
+ await this.commentService.commentRepository.save(
+ comment
+ );
+ } else {
+ const comment = await this.commentService.createComment(
+ parent?.id as string,
+ author?.id as string,
+ local.data.text as string
+ );
+ localId = comment.id;
+ await this.adapter.mappingDb.storeMapping({
+ localId,
+ globalId,
+ });
+ }
+ this.adapter.addToLockedIds(localId);
+ } else {
+ let likedBy: User[] = [];
+ if (
+ local.data.likedBy &&
+ Array.isArray(local.data.likedBy)
+ ) {
+ const likedByPromises = local.data.likedBy.map(
+ async (ref: string) => {
+ if (ref && typeof ref === "string") {
+ const userId = ref
+ .split("(")[1]
+ .split(")")[0];
+ return await this.userService.findById(
+ userId
+ );
+ }
+ return null;
+ }
+ );
+ likedBy = (await Promise.all(likedByPromises)).filter(
+ (user): user is User => user !== null
+ );
+ }
+
+ if (localId) {
+ const post = await this.postsService.findById(localId);
+ if (!post) return res.status(500).send();
+ for (const key of Object.keys(local.data)) {
+ // @ts-ignore
+ post[key] = local.data[key];
+ }
+ post.likedBy = likedBy;
+ // @ts-ignore
+ post.author = author ?? undefined;
+
+ this.adapter.addToLockedIds(localId);
+ await this.postsService.postRepository.save(post);
+ } else {
+ console.log("Creating new post");
+ const post = await this.postsService.createPost(
+ author?.id as string,
+ // @ts-ignore
+ {
+ ...local.data,
+ likedBy,
+ }
+ );
+
+ this.adapter.addToLockedIds(post.id);
+ await this.adapter.mappingDb.storeMapping({
+ localId: post.id,
+ globalId,
+ });
+
+ // Verify the mapping was stored
+ const verifyLocalId =
+ await this.adapter.mappingDb.getLocalId(globalId);
+ console.log("Verified mapping:", {
+ expected: post.id,
+ actual: verifyLocalId,
+ });
+ }
+ }
+ } else if (mapping.tableName === "chats") {
+ let participants: User[] = [];
+ if (
+ local.data.participants &&
+ Array.isArray(local.data.participants)
+ ) {
+ console.log(local);
+ const participantPromises = local.data.participants.map(
+ async (ref: string) => {
+ if (ref && typeof ref === "string") {
+ const userId = ref.split("(")[1].split(")")[0];
+ return await this.userService.findById(userId);
+ }
+ return null;
+ }
+ );
+ participants = (
+ await Promise.all(participantPromises)
+ ).filter((user): user is User => user !== null);
+ console.log(participants);
+ }
+
+ if (localId) {
+ const chat = await this.chatService.findById(localId);
+ if (!chat) return res.status(500).send();
+
+ chat.name = local.data.name as string;
+ chat.participants = participants;
+
+ this.adapter.addToLockedIds(localId);
+ await this.chatService.chatRepository.save(chat);
+ } else {
+ const chat = await this.chatService.createChat(
+ local.data.name as string,
+ participants.map((p) => p.id)
+ );
+
+ this.adapter.addToLockedIds(chat.id);
+ await this.adapter.mappingDb.storeMapping({
+ localId: chat.id,
+ globalId: req.body.id,
+ });
+ }
+ } else if (mapping.tableName === "messages") {
+ console.log("messages");
+ console.log(local.data);
+ let sender: User | null = null;
+ if (
+ local.data.sender &&
+ typeof local.data.sender === "string"
+ ) {
+ const senderId = local.data.sender
+ .split("(")[1]
+ .split(")")[0];
+ sender = await this.userService.findById(senderId);
+ }
+
+ let chat: Chat | null = null;
+ if (local.data.chat && typeof local.data.chat === "string") {
+ const chatId = local.data.chat.split("(")[1].split(")")[0];
+ chat = await this.chatService.findById(chatId);
+ }
+
+ if (!sender || !chat) {
+ console.log(local.data);
+ console.log("Missing sender or chat for message");
+ return res.status(400).send();
+ }
+
+ if (localId) {
+ console.log("Updating existing message");
+ const message = await this.messageService.findById(localId);
+ if (!message) return res.status(500).send();
+
+ message.text = local.data.text as string;
+ message.sender = sender;
+ message.chat = chat;
+
+ this.adapter.addToLockedIds(localId);
+ await this.messageService.messageRepository.save(message);
+ } else {
+ const message = await this.chatService.sendMessage(
+ chat.id,
+ sender.id,
+ local.data.text as string
+ );
+
+ this.adapter.addToLockedIds(message.id);
+ await this.adapter.mappingDb.storeMapping({
+ localId: message.id,
+ globalId: req.body.id,
+ });
+ }
+ }
+ res.status(200).send();
+ } catch (e) {
+ console.error(e);
+ }
+ };
+}
diff --git a/platforms/pictique-api/src/database/data-source.ts b/platforms/pictique-api/src/database/data-source.ts
index afde67f9..879fe0c2 100644
--- a/platforms/pictique-api/src/database/data-source.ts
+++ b/platforms/pictique-api/src/database/data-source.ts
@@ -8,6 +8,7 @@ import { Message } from "./entities/Message";
import path from "path";
import { Chat } from "./entities/Chat";
import { MessageReadStatus } from "./entities/MessageReadStatus";
+import { PostgresSubscriber } from "../web3adapter/watchers/subscriber";
config({ path: path.resolve(__dirname, "../../../../.env") });
@@ -18,5 +19,5 @@ export const AppDataSource = new DataSource({
logging: process.env.NODE_ENV === "development",
entities: [User, Post, Comment, Message, Chat, MessageReadStatus],
migrations: ["src/database/migrations/*.ts"],
- subscribers: [],
+ subscribers: [PostgresSubscriber],
});
diff --git a/platforms/pictique-api/src/database/migrations/1749561069022-migration.ts b/platforms/pictique-api/src/database/migrations/1749561069022-migration.ts
new file mode 100644
index 00000000..33d26378
--- /dev/null
+++ b/platforms/pictique-api/src/database/migrations/1749561069022-migration.ts
@@ -0,0 +1,20 @@
+import { MigrationInterface, QueryRunner } from "typeorm";
+
+export class Migration1749561069022 implements MigrationInterface {
+ name = 'Migration1749561069022'
+
+ public async up(queryRunner: QueryRunner): Promise {
+ await queryRunner.query(`CREATE TABLE "__web3_id_mapping" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "localId" character varying NOT NULL, "metaEnvelopeId" character varying NOT NULL, "entityType" character varying NOT NULL, "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), CONSTRAINT "PK_4c57c87c4ee60f42d9c6b0861c2" PRIMARY KEY ("id"))`);
+ await queryRunner.query(`CREATE INDEX "IDX_e6df8ee410baeffd472e93cdd2" ON "__web3_id_mapping" ("localId") `);
+ await queryRunner.query(`CREATE INDEX "IDX_9bdab2968d15942d3e3187a620" ON "__web3_id_mapping" ("metaEnvelopeId") `);
+ await queryRunner.query(`CREATE INDEX "IDX_f62e57b7b9f593f2e1715912c9" ON "__web3_id_mapping" ("entityType") `);
+ }
+
+ public async down(queryRunner: QueryRunner): Promise {
+ await queryRunner.query(`DROP INDEX "public"."IDX_f62e57b7b9f593f2e1715912c9"`);
+ await queryRunner.query(`DROP INDEX "public"."IDX_9bdab2968d15942d3e3187a620"`);
+ await queryRunner.query(`DROP INDEX "public"."IDX_e6df8ee410baeffd472e93cdd2"`);
+ await queryRunner.query(`DROP TABLE "__web3_id_mapping"`);
+ }
+
+}
diff --git a/platforms/pictique-api/src/index.ts b/platforms/pictique-api/src/index.ts
index ff233dc6..78f3721f 100644
--- a/platforms/pictique-api/src/index.ts
+++ b/platforms/pictique-api/src/index.ts
@@ -10,47 +10,59 @@ import { CommentController } from "./controllers/CommentController";
import { MessageController } from "./controllers/MessageController";
import { authMiddleware, authGuard } from "./middleware/auth";
import { UserController } from "./controllers/UserController";
+import { WebhookController } from "./controllers/WebhookController";
+import { adapter } from "./web3adapter/watchers/subscriber";
config({ path: path.resolve(__dirname, "../../../.env") });
const app = express();
const port = process.env.PORT || 3000;
+// Initialize database connection and adapter
+AppDataSource.initialize()
+ .then(async () => {
+ console.log("Database connection established");
+ console.log("Web3 adapter initialized");
+ })
+ .catch((error: any) => {
+ console.error("Error during initialization:", error);
+ process.exit(1);
+ });
+
// Middleware
app.use(
cors({
origin: "*",
methods: ["GET", "POST", "OPTIONS", "PATCH", "DELETE"],
- allowedHeaders: ["Content-Type", "Authorization"],
+ allowedHeaders: [
+ "Content-Type",
+ "Authorization",
+ "X-Webhook-Signature",
+ "X-Webhook-Timestamp",
+ ],
credentials: true,
}),
);
app.use(express.json({ limit: "50mb" }));
app.use(express.urlencoded({ limit: "50mb", extended: true }));
-// Initialize database connection
-AppDataSource.initialize()
- .then(() => {
- console.log("Database connection established");
- })
- .catch((error) => {
- console.error("Error connecting to database:", error);
- process.exit(1);
- });
-
// Controllers
const postController = new PostController();
const authController = new AuthController();
const commentController = new CommentController();
const messageController = new MessageController();
const userController = new UserController();
+const webhookController = new WebhookController(adapter);
+
+// Webhook route (no auth required)
+// app.post("/api/webhook", adapter.webhookHandler.handleWebhook);
// Public routes (no auth required)
app.get("/api/auth/offer", authController.getOffer);
-app.get("/api/auth/offerb", authController.getOfferBlab);
app.post("/api/auth", authController.login);
app.get("/api/auth/sessions/:id", authController.sseStream);
app.get("/api/chats/:chatId/events", messageController.getChatEvents);
+app.post("/api/webhook", webhookController.handleWebhook);
// Protected routes (auth required)
app.use(authMiddleware); // Apply auth middleware to all routes below
diff --git a/platforms/pictique-api/src/middleware/auth.ts b/platforms/pictique-api/src/middleware/auth.ts
index fdace1d2..51346556 100644
--- a/platforms/pictique-api/src/middleware/auth.ts
+++ b/platforms/pictique-api/src/middleware/auth.ts
@@ -11,7 +11,7 @@ export const authMiddleware = async (
try {
const authHeader = req.headers.authorization;
if (!authHeader?.startsWith("Bearer ")) {
- return res.status(401).json({ error: "No token provided" });
+ return next();
}
const token = authHeader.split(" ")[1];
@@ -29,7 +29,6 @@ export const authMiddleware = async (
}
req.user = user;
- console.log("user", user.ename);
next();
} catch (error) {
console.error("Auth middleware error:", error);
@@ -43,4 +42,3 @@ export const authGuard = (req: Request, res: Response, next: NextFunction) => {
}
next();
};
-
diff --git a/platforms/pictique-api/src/services/ChatService.ts b/platforms/pictique-api/src/services/ChatService.ts
index 1deb0ae6..ab548bee 100644
--- a/platforms/pictique-api/src/services/ChatService.ts
+++ b/platforms/pictique-api/src/services/ChatService.ts
@@ -5,14 +5,19 @@ import { User } from "../database/entities/User";
import { MessageReadStatus } from "../database/entities/MessageReadStatus";
import { In } from "typeorm";
import { EventEmitter } from "events";
+import { emitter } from "./event-emitter";
export class ChatService {
- private chatRepository = AppDataSource.getRepository(Chat);
+ public chatRepository = AppDataSource.getRepository(Chat);
private messageRepository = AppDataSource.getRepository(Message);
private userRepository = AppDataSource.getRepository(User);
private messageReadStatusRepository =
AppDataSource.getRepository(MessageReadStatus);
- private eventEmitter = new EventEmitter();
+ private eventEmitter: EventEmitter;
+
+ constructor() {
+ this.eventEmitter = emitter;
+ }
// Event emitter getter
getEventEmitter(): EventEmitter {
@@ -20,9 +25,35 @@ export class ChatService {
}
// Chat CRUD Operations
+ async findChatByParticipants(participantIds: string[]): Promise {
+ if (participantIds.length === 0) {
+ return null;
+ }
+
+ // Find chats that have exactly the same participants
+ const chats = await this.chatRepository
+ .createQueryBuilder("chat")
+ .leftJoinAndSelect("chat.participants", "participants")
+ .getMany();
+
+ // Filter chats that have exactly the same participants (order doesn't matter)
+ const sortedParticipantIds = participantIds.sort();
+
+ for (const chat of chats) {
+ const chatParticipantIds = chat.participants.map(p => p.id).sort();
+
+ if (chatParticipantIds.length === sortedParticipantIds.length &&
+ chatParticipantIds.every((id, index) => id === sortedParticipantIds[index])) {
+ return chat;
+ }
+ }
+
+ return null;
+ }
+
async createChat(
name?: string,
- participantIds: string[] = [],
+ participantIds: string[] = []
): Promise {
const participants = await this.userRepository.findBy({
id: In(participantIds),
@@ -70,7 +101,7 @@ export class ChatService {
// Participant Operations
async addParticipants(
chatId: string,
- participantIds: string[],
+ participantIds: string[]
): Promise {
const chat = await this.getChatById(chatId);
if (!chat) {
@@ -102,7 +133,7 @@ export class ChatService {
async sendMessage(
chatId: string,
senderId: string,
- text: string,
+ text: string
): Promise {
const chat = await this.getChatById(chatId);
if (!chat) {
@@ -126,8 +157,9 @@ export class ChatService {
});
const savedMessage = await this.messageRepository.save(message);
+ console.log("Sent event", `chat:${chatId}`);
+ this.eventEmitter.emit(`chat:${chatId}`, [savedMessage]);
- // Create read status entries for all participants except sender
const readStatuses = chat.participants
.filter((p) => p.id !== senderId)
.map((participant) =>
@@ -135,13 +167,12 @@ export class ChatService {
message: savedMessage,
user: participant,
isRead: false,
- }),
+ })
);
- await this.messageReadStatusRepository.save(readStatuses);
-
- // Emit new message event
- this.eventEmitter.emit(`chat:${chatId}`, [savedMessage]);
+ await this.messageReadStatusRepository
+ .save(readStatuses)
+ .catch(() => null);
return savedMessage;
}
@@ -150,7 +181,7 @@ export class ChatService {
chatId: string,
userId: string,
page: number = 1,
- limit: number = 20,
+ limit: number = 20
): Promise<{
messages: Message[];
total: number;
@@ -201,7 +232,9 @@ export class ChatService {
.createQueryBuilder()
.update(MessageReadStatus)
.set({ isRead: true })
- .where("message.id IN (:...messageIds)", { messageIds: messageIds.map(m => m.id) })
+ .where("message.id IN (:...messageIds)", {
+ messageIds: messageIds.map((m) => m.id),
+ })
.andWhere("user.id = :userId", { userId })
.andWhere("isRead = :isRead", { isRead: false })
.execute();
@@ -228,7 +261,7 @@ export class ChatService {
async getUserChats(
userId: string,
page: number = 1,
- limit: number = 10,
+ limit: number = 10
): Promise<{
chats: (Chat & { latestMessage?: { text: string; isRead: boolean } })[];
total: number;
@@ -291,7 +324,7 @@ export class ChatService {
async getUnreadMessageCount(
chatId: string,
- userId: string,
+ userId: string
): Promise {
return await this.messageReadStatusRepository.count({
where: {
@@ -301,4 +334,11 @@ export class ChatService {
},
});
}
+
+ async findById(id: string): Promise {
+ return await this.chatRepository.findOne({
+ where: { id },
+ relations: ["participants"],
+ });
+ }
}
diff --git a/platforms/pictique-api/src/services/CommentService.ts b/platforms/pictique-api/src/services/CommentService.ts
index e994aa6c..6f2f6e68 100644
--- a/platforms/pictique-api/src/services/CommentService.ts
+++ b/platforms/pictique-api/src/services/CommentService.ts
@@ -3,19 +3,23 @@ import { Comment } from "../database/entities/Comment";
import { Post } from "../database/entities/Post";
export class CommentService {
- private commentRepository = AppDataSource.getRepository(Comment);
+ commentRepository = AppDataSource.getRepository(Comment);
private postRepository = AppDataSource.getRepository(Post);
- async createComment(postId: string, authorId: string, text: string): Promise {
+ async createComment(
+ postId: string,
+ authorId: string,
+ text: string
+ ): Promise {
const post = await this.postRepository.findOneBy({ id: postId });
if (!post) {
- throw new Error('Post not found');
+ throw new Error("Post not found");
}
const comment = this.commentRepository.create({
text,
author: { id: authorId },
- post: { id: postId }
+ post: { id: postId },
});
return await this.commentRepository.save(comment);
@@ -24,22 +28,22 @@ export class CommentService {
async getPostComments(postId: string): Promise {
return await this.commentRepository.find({
where: { post: { id: postId } },
- relations: ['author'],
- order: { createdAt: 'DESC' }
+ relations: ["author"],
+ order: { createdAt: "DESC" },
});
}
async getCommentById(id: string): Promise {
return await this.commentRepository.findOne({
where: { id },
- relations: ['author']
+ relations: ["author"],
});
}
async updateComment(id: string, text: string): Promise {
const comment = await this.getCommentById(id);
if (!comment) {
- throw new Error('Comment not found');
+ throw new Error("Comment not found");
}
comment.text = text;
@@ -49,9 +53,10 @@ export class CommentService {
async deleteComment(id: string): Promise {
const comment = await this.getCommentById(id);
if (!comment) {
- throw new Error('Comment not found');
+ throw new Error("Comment not found");
}
await this.commentRepository.softDelete(id);
}
-}
\ No newline at end of file
+}
+
diff --git a/platforms/pictique-api/src/services/MessageService.ts b/platforms/pictique-api/src/services/MessageService.ts
new file mode 100644
index 00000000..5d385439
--- /dev/null
+++ b/platforms/pictique-api/src/services/MessageService.ts
@@ -0,0 +1,21 @@
+import { AppDataSource } from "../database/data-source";
+import { Message } from "../database/entities/Message";
+
+export class MessageService {
+ public messageRepository = AppDataSource.getRepository(Message);
+
+ async findById(id: string): Promise {
+ return await this.messageRepository.findOneBy({ id });
+ }
+
+ async createMessage(senderId: string, chatId: string, text: string): Promise {
+ const message = this.messageRepository.create({
+ sender: { id: senderId },
+ chat: { id: chatId },
+ text,
+ isArchived: false
+ });
+
+ return await this.messageRepository.save(message);
+ }
+}
\ No newline at end of file
diff --git a/platforms/pictique-api/src/services/PostService.ts b/platforms/pictique-api/src/services/PostService.ts
index 47048a00..5e23cd89 100644
--- a/platforms/pictique-api/src/services/PostService.ts
+++ b/platforms/pictique-api/src/services/PostService.ts
@@ -7,12 +7,17 @@ interface CreatePostData {
text: string;
images?: string[];
hashtags?: string[];
+ likedBy?: User[];
}
export class PostService {
- private postRepository = AppDataSource.getRepository(Post);
+ postRepository = AppDataSource.getRepository(Post);
private userRepository = AppDataSource.getRepository(User);
+ async findById(id: string) {
+ return await this.postRepository.findOneBy({ id });
+ }
+
async getFollowingFeed(userId: string, page: number, limit: number) {
const user = await this.userRepository.findOne({
where: { id: userId },
@@ -28,7 +33,6 @@ export class PostService {
const [posts, total] = await this.postRepository.findAndCount({
where: {
- author: { id: In(authorIds) },
isArchived: false,
},
relations: ["author", "likedBy", "comments", "comments.author"],
@@ -58,7 +62,7 @@ export class PostService {
text: data.text,
images: data.images || [],
hashtags: data.hashtags || [],
- likedBy: [],
+ likedBy: data.likedBy,
});
return await this.postRepository.save(post);
diff --git a/platforms/pictique-api/src/services/UserService.ts b/platforms/pictique-api/src/services/UserService.ts
index 6f68ec2f..daab306b 100644
--- a/platforms/pictique-api/src/services/UserService.ts
+++ b/platforms/pictique-api/src/services/UserService.ts
@@ -5,7 +5,7 @@ import { signToken } from "../utils/jwt";
import { Like } from "typeorm";
export class UserService {
- private userRepository = AppDataSource.getRepository(User);
+ userRepository = AppDataSource.getRepository(User);
private postRepository = AppDataSource.getRepository(Post);
async createBlankUser(ename: string): Promise {
@@ -20,7 +20,7 @@ export class UserService {
}
async findOrCreateUser(
- ename: string,
+ ename: string
): Promise<{ user: User; token: string }> {
let user = await this.userRepository.findOne({
where: { ename },
@@ -39,11 +39,11 @@ export class UserService {
}
searchUsers = async (query: string) => {
- const searchQuery = query.toLowerCase();
+ const searchQuery = query;
return this.userRepository.find({
where: [
- { handle: Like(`%${searchQuery}%`) },
+ { name: Like(`%${searchQuery}%`) },
{ ename: Like(`%${searchQuery}%`) },
],
select: {
@@ -126,7 +126,10 @@ export class UserService {
};
}
- async updateProfile(userId: string, data: { handle?: string; avatarUrl?: string; name?: string }): Promise {
+ async updateProfile(
+ userId: string,
+ data: { handle?: string; avatarUrl?: string; name?: string }
+ ): Promise {
const user = await this.userRepository.findOneBy({ id: userId });
if (!user) {
throw new Error("User not found");
@@ -140,4 +143,3 @@ export class UserService {
return await this.userRepository.save(user);
}
}
-
diff --git a/platforms/pictique-api/src/services/event-emitter.ts b/platforms/pictique-api/src/services/event-emitter.ts
new file mode 100644
index 00000000..320b7406
--- /dev/null
+++ b/platforms/pictique-api/src/services/event-emitter.ts
@@ -0,0 +1,3 @@
+import { EventEmitter } from "events";
+
+export const emitter = new EventEmitter();
diff --git a/platforms/pictique-api/src/web3adapter/index.ts b/platforms/pictique-api/src/web3adapter/index.ts
new file mode 100644
index 00000000..e69de29b
diff --git a/platforms/pictique-api/src/web3adapter/mappings/chat.mapping.json b/platforms/pictique-api/src/web3adapter/mappings/chat.mapping.json
new file mode 100644
index 00000000..0b8bc7d6
--- /dev/null
+++ b/platforms/pictique-api/src/web3adapter/mappings/chat.mapping.json
@@ -0,0 +1,13 @@
+{
+ "tableName": "chats",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440003",
+ "ownerEnamePath": "users(participants[].ename)",
+ "ownedJunctionTables": [],
+ "localToUniversalMap": {
+ "name": "name",
+ "type": "type",
+ "participants": "users(participants[].id),participantIds",
+ "createdAt": "createdAt",
+ "updatedAt": "updatedAt"
+ }
+}
diff --git a/platforms/pictique-api/src/web3adapter/mappings/comment.mapping.json b/platforms/pictique-api/src/web3adapter/mappings/comment.mapping.json
new file mode 100644
index 00000000..822e8a5e
--- /dev/null
+++ b/platforms/pictique-api/src/web3adapter/mappings/comment.mapping.json
@@ -0,0 +1,19 @@
+{
+ "tableName": "comments",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440001",
+ "ownerEnamePath": "users(author.ename)",
+ "ownedJunctionTables": [
+ "comment_likes"
+ ],
+ "localToUniversalMap": {
+ "text": "content",
+ "images": "mediaUrls",
+ "hashtags": "tags",
+ "createdAt": "createdAt",
+ "parentPostId": "posts(post.id),parentPostId",
+ "updatedAt": "updatedAt",
+ "isArchived": "isArchived",
+ "likedBy": "users(likedBy[].id),likedBy",
+ "author": "users(author.id),authorId"
+ }
+}
diff --git a/platforms/pictique-api/src/web3adapter/mappings/message.mapping.json b/platforms/pictique-api/src/web3adapter/mappings/message.mapping.json
new file mode 100644
index 00000000..e7f495db
--- /dev/null
+++ b/platforms/pictique-api/src/web3adapter/mappings/message.mapping.json
@@ -0,0 +1,12 @@
+{
+ "tableName": "messages",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440004",
+ "ownerEnamePath": "users(sender.ename)",
+ "localToUniversalMap": {
+ "chat": "chats(chat.id),chatId",
+ "text": "content",
+ "sender": "users(sender.id),senderId",
+ "createdAt": "createdAt",
+ "updatedAt": "updatedAt"
+ }
+}
diff --git a/platforms/pictique-api/src/web3adapter/mappings/post.mapping.json b/platforms/pictique-api/src/web3adapter/mappings/post.mapping.json
new file mode 100644
index 00000000..4d09b222
--- /dev/null
+++ b/platforms/pictique-api/src/web3adapter/mappings/post.mapping.json
@@ -0,0 +1,19 @@
+{
+ "tableName": "posts",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440001",
+ "ownerEnamePath": "users(author.ename)",
+ "ownedJunctionTables": [
+ "post_likes"
+ ],
+ "localToUniversalMap": {
+ "text": "content",
+ "images": "mediaUrls",
+ "hashtags": "tags",
+ "createdAt": "createdAt",
+ "parentPostId": "posts(parentPostId),parentPostId",
+ "updatedAt": "updatedAt",
+ "isArchived": "isArchived",
+ "likedBy": "users(likedBy[].id),likedBy",
+ "author": "users(author.id),authorId"
+ }
+}
diff --git a/platforms/pictique-api/src/web3adapter/mappings/user.mapping.json b/platforms/pictique-api/src/web3adapter/mappings/user.mapping.json
new file mode 100644
index 00000000..f896f59c
--- /dev/null
+++ b/platforms/pictique-api/src/web3adapter/mappings/user.mapping.json
@@ -0,0 +1,24 @@
+{
+ "tableName": "users",
+ "schemaId": "550e8400-e29b-41d4-a716-446655440000",
+ "ownerEnamePath": "ename",
+ "ownedJunctionTables": [
+ "user_followers",
+ "user_following"
+ ],
+ "localToUniversalMap": {
+ "handle": "username",
+ "name": "displayName",
+ "description": "bio",
+ "avatarUrl": "avatarUrl",
+ "bannerUrl": "bannerUrl",
+ "ename": "ename",
+ "isVerified": "isVerified",
+ "isPrivate": "isPrivate",
+ "createdAt": "createdAt",
+ "updatedAt": "updatedAt",
+ "isArchived": "isArchived",
+ "followers": "followers",
+ "following": "following"
+ }
+}
diff --git a/platforms/pictique-api/src/web3adapter/watchers/subscriber.ts b/platforms/pictique-api/src/web3adapter/watchers/subscriber.ts
new file mode 100644
index 00000000..3e3d00b8
--- /dev/null
+++ b/platforms/pictique-api/src/web3adapter/watchers/subscriber.ts
@@ -0,0 +1,315 @@
+import {
+ EventSubscriber,
+ EntitySubscriberInterface,
+ InsertEvent,
+ UpdateEvent,
+ RemoveEvent,
+ ObjectLiteral,
+} from "typeorm";
+import { Web3Adapter } from "../../../../../infrastructure/web3-adapter/src/index";
+import path from "path";
+import dotenv from "dotenv";
+import { AppDataSource } from "../../database/data-source";
+import axios from "axios";
+import { table } from "console";
+
+dotenv.config({ path: path.resolve(__dirname, "../../../../../.env") });
+export const adapter = new Web3Adapter({
+ schemasPath: path.resolve(__dirname, "../mappings/"),
+ dbPath: path.resolve(process.env.PICTIQUE_MAPPING_DB_PATH as string),
+ registryUrl: process.env.PUBLIC_REGISTRY_URL as string,
+ platform: process.env.PUBLIC_PICTIQUE_BASE_URL as string,
+});
+
+// Map of junction tables to their parent entities
+const JUNCTION_TABLE_MAP = {
+ user_followers: { entity: "User", idField: "user_id" },
+ user_following: { entity: "User", idField: "user_id" },
+ post_likes: { entity: "Post", idField: "post_id" },
+ comment_likes: { entity: "Comment", idField: "comment_id" },
+ chat_participants: { entity: "Chat", idField: "chat_id" },
+};
+
+@EventSubscriber()
+export class PostgresSubscriber implements EntitySubscriberInterface {
+ private adapter: Web3Adapter;
+
+ constructor() {
+ this.adapter = adapter;
+ }
+
+ /**
+ * Called after entity is loaded.
+ */
+ afterLoad(entity: any) {
+ // Handle any post-load processing if needed
+ }
+
+ /**
+ * Called before entity insertion.
+ */
+    beforeInsert(event: InsertEvent<any>) {
+ // Handle any pre-insert processing if needed
+ }
+
+ async enrichEntity(entity: any, tableName: string, tableTarget: any) {
+ try {
+ const enrichedEntity = { ...entity };
+
+ if (entity.author) {
+ const author = await AppDataSource.getRepository(
+ "User"
+ ).findOne({ where: { id: entity.author.id } });
+ enrichedEntity.author = author;
+ }
+
+ return this.entityToPlain(enrichedEntity);
+ } catch (error) {
+ console.error("Error loading relations:", error);
+ return this.entityToPlain(entity);
+ }
+ }
+
+ /**
+ * Called after entity insertion.
+ */
+    async afterInsert(event: InsertEvent<any>) {
+ let entity = event.entity;
+ if (entity) {
+ entity = (await this.enrichEntity(
+ entity,
+ event.metadata.tableName,
+ event.metadata.target
+ )) as ObjectLiteral;
+ }
+ this.handleChange(
+ // @ts-ignore
+ entity ?? event.entityId,
+ event.metadata.tableName.endsWith("s")
+ ? event.metadata.tableName
+ : event.metadata.tableName + "s"
+ );
+ }
+
+ /**
+ * Called before entity update.
+ */
+    beforeUpdate(event: UpdateEvent<any>) {
+ // Handle any pre-update processing if needed
+ }
+
+ /**
+ * Called after entity update.
+ */
+    async afterUpdate(event: UpdateEvent<any>) {
+ let entity = event.entity;
+ if (entity) {
+ entity = (await this.enrichEntity(
+ entity,
+ event.metadata.tableName,
+ event.metadata.target
+ )) as ObjectLiteral;
+ }
+ this.handleChange(
+ // @ts-ignore
+ entity ?? event.entityId,
+ event.metadata.tableName
+ );
+ }
+
+ /**
+ * Called before entity removal.
+ */
+    beforeRemove(event: RemoveEvent<any>) {
+ // Handle any pre-remove processing if needed
+ }
+
+ /**
+ * Called after entity removal.
+ */
+    async afterRemove(event: RemoveEvent<any>) {
+ let entity = event.entity;
+ if (entity) {
+ entity = (await this.enrichEntity(
+ entity,
+ event.metadata.tableName,
+ event.metadata.target
+ )) as ObjectLiteral;
+ }
+ this.handleChange(
+ // @ts-ignore
+ entity ?? event.entityId,
+ event.metadata.tableName
+ );
+ }
+
+ /**
+ * Process the change and send it to the Web3Adapter
+ */
+    private async handleChange(entity: any, tableName: string): Promise<void> {
+ // Check if this is a junction table
+ if (
+ tableName === "message_read_status" ||
+ tableName === "chat_participants"
+ )
+ return;
+ // @ts-ignore
+ const junctionInfo = JUNCTION_TABLE_MAP[tableName];
+ if (junctionInfo) {
+ console.log("Processing junction table change:", tableName);
+ await this.handleJunctionTableChange(entity, junctionInfo);
+ return;
+ }
+ // Handle regular entity changes
+ const data = this.entityToPlain(entity);
+ if (!data.id) return;
+
+ try {
+ setTimeout(async () => {
+ let globalId = await this.adapter.mappingDb.getGlobalId(
+ entity.id
+ );
+ globalId = globalId ?? "";
+
+ if (this.adapter.lockedIds.includes(globalId))
+ return console.log("locked skipping ", globalId);
+
+ console.log(
+ "sending packet for global Id",
+ globalId,
+ entity.id
+ );
+ const envelope = await this.adapter.handleChange({
+ data,
+ tableName: tableName.toLowerCase(),
+ });
+ }, 3_000);
+ } catch (error) {
+ console.error(`Error processing change for ${tableName}:`, error);
+ }
+ }
+
+ /**
+ * Handle changes in junction tables by converting them to parent entity changes
+ */
+ private async handleJunctionTableChange(
+ entity: any,
+ junctionInfo: { entity: string; idField: string }
+    ): Promise<void> {
+ try {
+ const parentId = entity[junctionInfo.idField];
+ if (!parentId) {
+ console.error("No parent ID found in junction table change");
+ return;
+ }
+
+ // Get the parent entity repository
+            // Resolve the parent entity's repository by name so a junction-table
+            // change (likes, followers, participants) syncs as a parent-entity update.
+ const repository = AppDataSource.getRepository(junctionInfo.entity);
+ const parentEntity = await repository.findOne({
+ where: { id: parentId },
+ relations: this.getRelationsForEntity(junctionInfo.entity),
+ });
+
+ if (!parentEntity) {
+ console.error(`Parent entity not found: ${parentId}`);
+ return;
+ }
+
+ let globalId = await this.adapter.mappingDb.getGlobalId(entity.id);
+ globalId = globalId ?? "";
+
+            // Defer the sync briefly so the global-ID mapping can settle; the locked-ID check reruns inside the callback
+ try {
+ setTimeout(async () => {
+ let globalId = await this.adapter.mappingDb.getGlobalId(
+ entity.id
+ );
+ globalId = globalId ?? "";
+
+ if (this.adapter.lockedIds.includes(globalId))
+ return console.log("locked skipping ", globalId);
+
+ console.log(
+ "sending packet for global Id",
+ globalId,
+ entity.id
+ );
+
+ const tableName = `${junctionInfo.entity.toLowerCase()}s`;
+ await this.adapter.handleChange({
+ data: this.entityToPlain(parentEntity),
+ tableName,
+ });
+ }, 3_000);
+ } catch (error) {
+ console.error(error);
+ }
+ } catch (error) {
+ console.error("Error handling junction table change:", error);
+ }
+ }
+
+ /**
+ * Get the relations that should be loaded for each entity type
+ */
+ private getRelationsForEntity(entityName: string): string[] {
+ switch (entityName) {
+ case "User":
+ return ["followers", "following", "posts", "comments", "chats"];
+ case "Post":
+ return ["author", "likedBy", "comments"];
+ case "Comment":
+ return ["author", "post", "likedBy"];
+ case "Chat":
+ return ["participants", "messages"];
+ default:
+ return [];
+ }
+ }
+
+ /**
+ * Convert TypeORM entity to plain object
+ */
+ private entityToPlain(entity: any): any {
+ if (!entity) return {};
+
+ // If it's already a plain object, return it
+ if (typeof entity !== "object" || entity === null) {
+ return entity;
+ }
+
+ // Handle Date objects
+ if (entity instanceof Date) {
+ return entity.toISOString();
+ }
+
+ // Handle arrays
+ if (Array.isArray(entity)) {
+ return entity.map((item) => this.entityToPlain(item));
+ }
+
+ // Convert entity to plain object
+        const plain: Record<string, any> = {};
+ for (const [key, value] of Object.entries(entity)) {
+ // Skip private properties and methods
+ if (key.startsWith("_")) continue;
+
+ // Handle nested objects and arrays
+ if (value && typeof value === "object") {
+ if (Array.isArray(value)) {
+ plain[key] = value.map((item) => this.entityToPlain(item));
+ } else if (value instanceof Date) {
+ plain[key] = value.toISOString();
+ } else {
+ plain[key] = this.entityToPlain(value);
+ }
+ } else {
+ plain[key] = value;
+ }
+ }
+
+ return plain;
+ }
+}
diff --git a/platforms/pictique/package.json b/platforms/pictique/package.json
index f35eb2d6..f8b21b88 100644
--- a/platforms/pictique/package.json
+++ b/platforms/pictique/package.json
@@ -51,6 +51,7 @@
},
"dependencies": {
"-": "^0.0.1",
+ "@sveltejs/adapter-node": "^5.2.12",
"D": "^1.0.0",
"axios": "^1.6.7",
"moment": "^2.30.1",
diff --git a/platforms/pictique/src/lib/fragments/BottomNav/BottomNav.svelte b/platforms/pictique/src/lib/fragments/BottomNav/BottomNav.svelte
index c0dff182..4710c139 100644
--- a/platforms/pictique/src/lib/fragments/BottomNav/BottomNav.svelte
+++ b/platforms/pictique/src/lib/fragments/BottomNav/BottomNav.svelte
@@ -2,8 +2,8 @@
import { goto } from '$app/navigation';
import { page } from '$app/state';
import { Camera, CommentsTwo, Home, Search } from '$lib/icons';
- import { isNavigatingThroughNav, ownerId } from '$lib/store/store.svelte';
- import { uploadedImages } from '$lib/store/store.svelte';
+ import { isNavigatingThroughNav } from '$lib/store/store.svelte';
+ import { uploadedImages } from '$lib/store/store.svelte';
import { revokeImageUrls } from '$lib/utils';
import type { HTMLAttributes } from 'svelte/elements';
@@ -22,7 +22,7 @@
let fullPath = $derived(page.url.pathname);
let imageInput: HTMLInputElement;
- let images: FileList | null = $state(null);
+ let images: FileList | null = $state(null);
const handleNavClick = (newTab: string) => {
// activeTab = newTab;
@@ -34,9 +34,9 @@
previousTab = newTab;
if (newTab === 'profile') {
goto(`/profile/${ownerId}`);
- } else if (newTab === "post") {
- uploadedImages.value = null;
- imageInput.value = "";
+ } else if (newTab === 'post') {
+ uploadedImages.value = null;
+ imageInput.value = '';
imageInput.click();
} else {
goto(`/${newTab}`);
@@ -45,22 +45,34 @@
$effect(() => {
activeTab = _activeTab.split('/').pop() ?? '';
- if (images && images.length > 0 && activeTab !== 'post' && previousTab === 'post' && !_activeTab.includes('post/audience')) {
- if (uploadedImages.value)
- revokeImageUrls(uploadedImages.value);
- uploadedImages.value = Array.from(images).map(file => ({
- url: URL.createObjectURL(file),
- alt: file.name
- }));
+ if (
+ images &&
+ images.length > 0 &&
+ activeTab !== 'post' &&
+ previousTab === 'post' &&
+ !_activeTab.includes('post/audience')
+ ) {
+ if (uploadedImages.value) revokeImageUrls(uploadedImages.value);
+ uploadedImages.value = Array.from(images).map((file) => ({
+ url: URL.createObjectURL(file),
+ alt: file.name
+ }));
images = null; // To prevent re-triggering the effect and thus making an infinite loop with /post route's effect when the length of uploadedImages goes to 0
- if (uploadedImages.value.length > 0) {
- goto("/post");
- }
+ if (uploadedImages.value.length > 0) {
+ goto('/post');
+ }
}
});
-
+