diff --git a/Week1/assignment/exer1.js b/Week1/assignment/exer1.js new file mode 100644 index 000000000..653a741c7 --- /dev/null +++ b/Week1/assignment/exer1.js @@ -0,0 +1,106 @@ +const { Client } = require("pg"); + +const DB_USER = "hyfuser"; +const DB_PASSWORD = "hyfpassword"; +const DB_HOST = "localhost"; +const DB_PORT = 5432; + +const baseConfig = { + host: DB_HOST, + port: DB_PORT, + user: DB_USER, + password: DB_PASSWORD, +}; + +async function withClient(database, handler) { + const client = new Client({ ...baseConfig, database }); + await client.connect(); + + try { + return await handler(client); + } finally { + await client.end(); + } +} + +async function setupMeetup() { + try { + // 1) Drop & recreate database meetup + await withClient("postgres", async (client) => { + console.log("Recreating database meetup..."); + + await client.query("DROP DATABASE IF EXISTS meetup;"); + await client.query("CREATE DATABASE meetup;"); + }); + + + await withClient("meetup", async (db) => { + console.log("Creating tables..."); + + await db.query(` + CREATE TABLE Invitee ( + invitee_no SERIAL PRIMARY KEY, + invitee_name VARCHAR(80) NOT NULL, + invited_by VARCHAR(80) NOT NULL + ); + `); + + await db.query(` + CREATE TABLE Room ( + room_no SERIAL PRIMARY KEY, + room_name VARCHAR(80) NOT NULL, + floor_number INTEGER NOT NULL + ); + `); + + await db.query(` + CREATE TABLE Meeting ( + meeting_no SERIAL PRIMARY KEY, + meeting_title VARCHAR(120) NOT NULL, + starting_time TIMESTAMP NOT NULL, + ending_time TIMESTAMP NOT NULL, + room_no INTEGER REFERENCES Room(room_no) + ); + `); + + console.log("Inserting sample data into Invitee..."); + await db.query(` + INSERT INTO Invitee (invitee_name, invited_by) VALUES + ('Karim', 'Majd'), + ('Rim', 'Karim'), + ('Alaa', 'Karim'), + ('Ivan', 'Majd'), + ('Dima', 'Ivan'); + `); + + console.log("Inserting sample data into Room..."); + await db.query(` + INSERT INTO Room (room_name, floor_number) VALUES + ('Alfa Room', 1), + ('Atlas Room', 1), + 
('Nova Room', 2), + ('Fox Room', 2), + ('Sky Room', 3); + `); + + console.log("Inserting sample data into Meeting..."); + await db.query(` + INSERT INTO Meeting (meeting_title, starting_time, ending_time, room_no) VALUES + ('Intro to Databases', '2025-11-20 09:00', '2025-11-20 10:00', 1), + ('SQL Practice', '2025-11-20 10:30', '2025-11-20 11:30', 2), + ('Node & Postgres', '2025-11-21 09:00', '2025-11-21 10:30', 3), + ('Security Session', '2025-11-21 11:00', '2025-11-21 12:00', 4), + ('Retrospective Meetup', '2025-11-22 15:00', '2025-11-22 16:00', 5); + `); + }); + + console.log("Meetup database created and filled with data."); + } catch (err) { + console.error("Error while setting up meetup database:", err.message); + } +} + +setupMeetup().catch((err) => { + console.error("Unexpected error:", err); +}); + diff --git a/Week1/assignment/exer2.js b/Week1/assignment/exer2.js new file mode 100644 index 000000000..2f78aaab5 --- /dev/null +++ b/Week1/assignment/exer2.js @@ -0,0 +1,127 @@ +const { Client } = require("pg"); + +const DB_USER = "hyfuser"; +const DB_PASSWORD = "hyfpassword"; +const DB_HOST = "localhost"; +const DB_PORT = 5432; + +const client = new Client({ + user: DB_USER, + password: DB_PASSWORD, + host: DB_HOST, + port: DB_PORT, + database: "world", +}); + +async function runQuery(label, sql) { + console.log(`\n=== ${label} ===`); + + const result = await client.query(sql); + console.table(result.rows); +} + +async function runWorldQueries() { + + try { + + await client.connect(); + console.log("Connected to world database"); + + await runQuery("1) Countries with population greater than 8 million", + ` + SELECT name, population + FROM country + WHERE population > 8000000 + ORDER BY population DESC; + `); + + await runQuery('2) Countries that have "land" in their names', + ` + SELECT name + FROM country + WHERE name ILIKE '%land%' + ORDER BY name; + `); + + await runQuery("3) Cities with population between 500,000 and 1,000,000", + ` + SELECT name, 
population + FROM city + WHERE population BETWEEN 500000 AND 1000000 + ORDER BY population; + `); + + await runQuery("4) Countries on the continent 'Europe'", + ` + SELECT name + FROM country + WHERE continent = 'Europe' + ORDER BY name; + `); + + await runQuery("5) Countries ordered by surface area (descending)", + ` + SELECT name, surfacearea + FROM country + ORDER BY surfacearea DESC; + `); + + await runQuery("6) Cities in the Netherlands", + ` + SELECT c.name + FROM city AS c + JOIN country AS co ON c.countrycode = co.code + WHERE co.name = 'Netherlands' + ORDER BY c.name; + `); + + await runQuery("7) Population of Rotterdam", + ` + SELECT population + FROM city + WHERE name = 'Rotterdam'; + `); + + await runQuery("8) Top 10 countries by surface area", + ` + SELECT name, surfacearea + FROM country + ORDER BY surfacearea DESC + LIMIT 10; + `); + + await runQuery( + "9) Top 10 most populated cities", + ` + SELECT name, population + FROM city + ORDER BY population DESC + LIMIT 10; + `); + + await runQuery( + "10) Total world population", + ` + SELECT SUM(population) AS world_population + FROM country; + `); + + + console.log("\n All queries executed successfully."); + + } catch (err) { + + console.error("Error while running world queries:", err.message); + + } finally { + + await client.end(); + + console.log("Connection closed."); + } +} + + +runWorldQueries().catch((err) => { + console.error("Unexpected error:", err); +}); diff --git a/Week2/assignment/db.js b/Week2/assignment/db.js new file mode 100644 index 000000000..5f1e96f0b --- /dev/null +++ b/Week2/assignment/db.js @@ -0,0 +1,13 @@ +import pg from "pg"; + +const { Pool } = pg; + +const pool = new Pool({ + host: process.env.DB_HOST ?? "localhost", + user: process.env.DB_USER ?? "hyfuser", + password: process.env.DB_PASSWORD ?? "hyfpassword", + database: process.env.DB_NAME ?? "research_w2", + port: Number(process.env.DB_PORT ?? 
5432), +}); + +export default pool; diff --git a/Week2/assignment/ex1.js b/Week2/assignment/ex1.js new file mode 100644 index 000000000..0497d03f5 --- /dev/null +++ b/Week2/assignment/ex1.js @@ -0,0 +1,42 @@ +import pool from "./db.js"; + +async function run() { + try { + + await pool.query("DROP TABLE IF EXISTS authors CASCADE;"); + await pool.query(` + CREATE TABLE authors ( + author_id SERIAL PRIMARY KEY, + full_name VARCHAR(120) NOT NULL, + university VARCHAR(120), + date_of_birth DATE, + h_index INTEGER, + gender VARCHAR(10) + ); + `); + + + await pool.query(` + ALTER TABLE authors + ADD COLUMN mentor INTEGER; + `); + + + await pool.query(` + ALTER TABLE authors + ADD CONSTRAINT fk_authors_mentor + FOREIGN KEY (mentor) + REFERENCES authors (author_id) + ON DELETE SET NULL; + `); + + console.log("Exercise 1: authors table + mentor FK created"); + } catch (err) { + console.error("Error in ex1-keys:", err.message); + } finally { + await pool.end(); + console.log("Connection closed"); + } +} + +run(); diff --git a/Week2/assignment/ex2.js b/Week2/assignment/ex2.js new file mode 100644 index 000000000..a54890e5e --- /dev/null +++ b/Week2/assignment/ex2.js @@ -0,0 +1,124 @@ +import pool from "./db.js"; + +const authorRows = [ + ["Noah Clarke", "TU Eindhoven", "1981-04-11", 14, "M"], + ["Mila Verhoef", "Utrecht University", "1986-09-02", 18, "F"], + ["Jonas Peters", "TU Delft", "1979-12-23", 22, "M"], + ["Sara Koster", "University of Twente", "1990-03-19", 11, "F"], + ["Liam de Bruin", "VU Amsterdam", "1984-07-08", 16, "M"], + ["Nadia Rossi", "Leiden University", "1988-01-28", 19, "F"], + ["Omar Haddad", "University of Amsterdam", "1977-10-04", 24, "M"], + ["Elif Kaya", "Radboud University", "1989-06-15", 13, "F"], + ["Lucas Stein", "Ruhr University Bochum", "1982-11-30", 20, "M"], + ["Yara Hussain", "Ghent University", "1991-02-05", 9, "F"], + ["Tobias Meier", "ETH Zürich", "1980-08-12", 26, "M"], + ["Rosa Almeida", "University of Porto", "1987-05-27", 17, "F"], + 
["Imran Malik", "KU Leuven", "1992-01-09", 7, "M"], + ["Emma Jansen", "Hanze University", "1993-09-14", 5, "F"], + ["Felix Novak", "Charles University", "1985-12-03", 12, "M"], +]; + +async function seedAuthors() { + + await pool.query("TRUNCATE TABLE authors RESTART IDENTITY CASCADE;"); + + const insertSql = ` + INSERT INTO authors (full_name, university, date_of_birth, h_index, gender) + VALUES ($1, $2, $3, $4, $5); + `; + + for (const row of authorRows) { + await pool.query(insertSql, row); + } + + await pool.query("UPDATE authors SET mentor = 3 WHERE author_id = 1;"); + await pool.query("UPDATE authors SET mentor = 3 WHERE author_id = 2;"); + await pool.query("UPDATE authors SET mentor = 5 WHERE author_id = 4;"); + await pool.query("UPDATE authors SET mentor = 2 WHERE author_id = 5;"); + await pool.query("UPDATE authors SET mentor = 7 WHERE author_id = 6;"); + await pool.query("UPDATE authors SET mentor = 1 WHERE author_id = 8;"); + await pool.query("UPDATE authors SET mentor = 10 WHERE author_id = 9;"); + await pool.query("UPDATE authors SET mentor = 11 WHERE author_id = 10;"); + await pool.query("UPDATE authors SET mentor = 4 WHERE author_id = 11;"); + await pool.query("UPDATE authors SET mentor = 6 WHERE author_id = 12;"); + +} + +async function createPaperTables() { + await pool.query("DROP TABLE IF EXISTS authors_papers;"); + await pool.query("DROP TABLE IF EXISTS research_papers;"); + + await pool.query(` + CREATE TABLE research_papers ( + paper_id SERIAL PRIMARY KEY, + paper_title VARCHAR(200) NOT NULL, + conference VARCHAR(100), + publish_date DATE + ); + `); + + await pool.query(` + CREATE TABLE authors_papers ( + author_id INTEGER REFERENCES authors(author_id) ON DELETE CASCADE, + paper_id INTEGER REFERENCES research_papers(paper_id) ON DELETE CASCADE, + PRIMARY KEY (author_id, paper_id) + ); + `); +} + +async function seedPapersAndLinks() { + + for (let i = 1; i <= 30; i += 1) { + await pool.query( + ` + INSERT INTO research_papers (paper_title, 
conference, publish_date) + VALUES ($1, $2, $3); + `, + [`Study on Databases ${i}`, `DataConf ${((i - 1) % 5) + 1}`, `202${i % 4}-0${((i - 1) % 9) + 1}-15`] + ); + } + + + const links = [ + [1, 1], [1, 2], [2, 3], + [3, 4], [3, 5], + [4, 6], + [5, 7], [5, 8], [5, 9], + [6, 10], + [7, 11], [7, 12], + [8, 13], + [9, 14], [9, 15], + [10, 16], [10, 17], + [11, 18], [11, 19], [11, 20], + [12, 21], + [13, 22], + [14, 23], [14, 24], + [1, 25], [2, 26], [3, 27], [4, 28], [6, 29], [7, 30], + ]; + + for (const [authorId, paperId] of links) { + await pool.query( + ` + INSERT INTO authors_papers (author_id, paper_id) + VALUES ($1, $2); + `, + [authorId, paperId] + ); + } +} + +async function run() { + try { + await createPaperTables(); + await seedAuthors(); + await seedPapersAndLinks(); + console.log("Exercise 2: tables created and data inserted"); + } catch (err) { + console.error("Error in ex2-relationships:", err.message); + } finally { + await pool.end(); + console.log("Connection closed"); + } +} + +run(); diff --git a/Week2/assignment/ex3.js b/Week2/assignment/ex3.js new file mode 100644 index 000000000..6357e4e3f --- /dev/null +++ b/Week2/assignment/ex3.js @@ -0,0 +1,48 @@ +import pool from "./db.js"; + +async function run() { + try { + + const q1 = ` + SELECT + a.author_id, + a.full_name AS author_name, + m.full_name AS mentor_name + FROM authors a + LEFT JOIN authors m + ON a.mentor = m.author_id + ORDER BY a.author_id; + `; + const res1 = await pool.query(q1); + console.log("\nAuthors with their mentors"); + console.table(res1.rows); + + + const q2 = ` + SELECT + a.author_id, + a.full_name, + a.university, + a.date_of_birth, + a.h_index, + a.gender, + rp.paper_title + FROM authors a + LEFT JOIN authors_papers ap + ON a.author_id = ap.author_id + LEFT JOIN research_papers rp + ON ap.paper_id = rp.paper_id + ORDER BY a.author_id, rp.paper_title; + `; + const res2 = await pool.query(q2); + console.log("\nAuthors with their paper titles (if any)"); + 
console.table(res2.rows); + } catch (err) { + console.error("Error in ex3-joins:", err.message); + } finally { + await pool.end(); + console.log("Connection closed"); + } +} + +run(); diff --git a/Week2/assignment/ex4.js b/Week2/assignment/ex4.js new file mode 100644 index 000000000..a44c25e3b --- /dev/null +++ b/Week2/assignment/ex4.js @@ -0,0 +1,83 @@ +import pool from "./db.js"; + +async function run() { + try { + + const q1 = ` + SELECT + rp.paper_id, + rp.paper_title, + COUNT(ap.author_id) AS author_count + FROM research_papers rp + LEFT JOIN authors_papers ap + ON rp.paper_id = ap.paper_id + GROUP BY rp.paper_id, rp.paper_title + ORDER BY rp.paper_id; + `; + const res1 = await pool.query(q1); + console.log("\n1) Papers and number of authors"); + console.table(res1.rows); + + const q2 = ` + SELECT + COUNT(DISTINCT ap.paper_id) AS total_papers_by_female_authors + FROM authors a + JOIN authors_papers ap + ON a.author_id = ap.author_id + WHERE a.gender = 'F'; + `; + + const res2 = await pool.query(q2); + console.log("\n2) Total papers by female authors"); + console.table(res2.rows); + + const q3 = ` + SELECT + university, + AVG(h_index) AS average_h_index + FROM authors + GROUP BY university + ORDER BY university; + `; + + const res3 = await pool.query(q3); + console.log("\n3) Average h-index per university"); + console.table(res3.rows); + + + const q4 = ` + SELECT + a.university, + COUNT(DISTINCT ap.paper_id) AS total_papers + FROM authors a + LEFT JOIN authors_papers ap + ON a.author_id = ap.author_id + GROUP BY a.university + ORDER BY a.university; + `; + const res4 = await pool.query(q4); + console.log("\n4) Total papers per university"); + console.table(res4.rows); + + const q5 = ` + SELECT + university, + MIN(h_index) AS min_h_index, + MAX(h_index) AS max_h_index + FROM authors + GROUP BY university + ORDER BY university; + `; + + const res5 = await pool.query(q5); + console.log("\n5) Min/Max h-index per university"); + console.table(res5.rows); + } catch 
(err) { + console.error("Error in ex4-aggregate-functions:", err.message); + } finally { + await pool.end(); + console.log("Connection closed"); + } +} + +run(); diff --git a/Week3/assignment/db-client.js b/Week3/assignment/db-client.js new file mode 100644 index 000000000..6cb394b1e --- /dev/null +++ b/Week3/assignment/db-client.js @@ -0,0 +1,13 @@ +const { Client } = require("pg"); + +function createClient() { + return new Client({ + user: "hyfuser", + password: "hyfpassword", + host: "localhost", + port: 5432, + database: "world", + }); +} + +module.exports = { createClient }; diff --git a/Week3/assignment/exe-3.1.md b/Week3/assignment/exe-3.1.md new file mode 100644 index 000000000..7d0e5e48b --- /dev/null +++ b/Week3/assignment/exe-3.1.md @@ -0,0 +1,59 @@ +# 3.1 – SQL Normalization + +## 1. What columns violate 1NF? + +1NF says: each column must contain **atomic (single) values**, no lists. + +In the given table the columns that violate 1NF are: + +- `food_code` – contains multiple values in one cell (e.g. `C1, C2`) +- `food_description` – also contains multiple values in one cell + that belong to different food items (e.g. `Curry, Cake`) + +So: **`food_code` and `food_description` violate 1NF.** + +--- + +## 2. What entities do you recognize that could be extracted? + +From the table we can recognise at least these entities: + +- **Member** – information about the club member +- **Dinner** – a specific dinner event for a member +- **Venue** – where the dinner takes place +- **Food** – the types of food served at a dinner + +--- + +## 3. 
Name all the tables and columns that would make a 3NF solution + +A 3NF-compliant design could be: + +### `members` + +- `member_id` (PK) +- `member_name` +- `member_address` + +### `venues` + +- `venue_code` (PK) +- `venue_description` + +### `dinners` + +- `dinner_id` (PK) +- `member_id` (FK → members.member_id) +- `dinner_date` +- `venue_code` (FK → venues.venue_code) + +### `foods` + +- `food_code` (PK) +- `food_description` + +### `dinner_foods` (link table, because one dinner has many foods) + +- `dinner_id` (FK → dinners.dinner_id) +- `food_code` (FK → foods.food_code) +- **PK** on (`dinner_id`, `food_code`) diff --git a/Week3/assignment/injection.js b/Week3/assignment/injection.js new file mode 100644 index 000000000..42101efa8 --- /dev/null +++ b/Week3/assignment/injection.js @@ -0,0 +1,11 @@ +function getPopulation(Country, name, code, cb) { + + const sql = `SELECT population FROM ${Country} WHERE name = $1 AND code = $2`; + + conn.query(sql, [name, code], function (err, result) { + if (err) return cb(err); + if (result.rows.length === 0) return cb(new Error("Not found")); + + cb(null, result.rows[0].population); + }); +} diff --git a/Week3/assignment/transaction.js b/Week3/assignment/transaction.js new file mode 100644 index 000000000..9518b3db0 --- /dev/null +++ b/Week3/assignment/transaction.js @@ -0,0 +1,47 @@ +const { createClient } = require("./db-client"); + +async function main() { + const client = createClient(); + + const fromAcc = 101; + const toAcc = 102; + const amount = 1000.0; + + try { + await client.connect(); + await client.query("BEGIN"); + + await client.query( + "UPDATE account SET balance = balance - $1 WHERE account_number = $2", + [amount, fromAcc] + ); + + await client.query( + `INSERT INTO account_changes (account_number, amount, remark) + VALUES ($1, $2, $3)`, + [fromAcc, -amount, "Transfer to 102"] + ); + + await client.query( + "UPDATE account SET balance = balance + $1 WHERE account_number = $2", + [amount, toAcc] + ); + + 
await client.query( + `INSERT INTO account_changes (account_number, amount, remark) + VALUES ($1, $2, $3)`, + [toAcc, amount, "Transfer from 101"] + ); + + + await client.query("COMMIT"); + console.log("Transferred 1000 from 101 to 102 in a single transaction."); + } catch (err) { + console.error("Error, rolling back:", err); + await client.query("ROLLBACK"); + } finally { + await client.end(); + } +} + +main(); diff --git a/Week3/assignment/transactions-create-tables.js b/Week3/assignment/transactions-create-tables.js new file mode 100644 index 000000000..f3913530c --- /dev/null +++ b/Week3/assignment/transactions-create-tables.js @@ -0,0 +1,35 @@ +const { createClient } = require("./db-client"); + +async function main() { + const client = createClient(); + + const createTablesSql = ` + DROP TABLE IF EXISTS account_changes; + DROP TABLE IF EXISTS account; + + CREATE TABLE account ( + account_number INTEGER PRIMARY KEY, + balance NUMERIC(12, 2) NOT NULL + ); + + CREATE TABLE account_changes ( + change_number SERIAL PRIMARY KEY, + account_number INTEGER NOT NULL REFERENCES account(account_number), + amount NUMERIC(12, 2) NOT NULL, + changed_date TIMESTAMP NOT NULL DEFAULT NOW(), + remark TEXT + ); + `; + + try { + await client.connect(); + await client.query(createTablesSql); + console.log("Tables 'account' and 'account_changes' created."); + } catch (err) { + console.error("Error creating tables:", err); + } finally { + await client.end(); + } +} + +main(); diff --git a/Week3/assignment/transactions-insert-values.js b/Week3/assignment/transactions-insert-values.js new file mode 100644 index 000000000..f61472120 --- /dev/null +++ b/Week3/assignment/transactions-insert-values.js @@ -0,0 +1,30 @@ +const { createClient } = require("./db-client"); + +async function main() { + const client = createClient(); + + const insertSql = ` + INSERT INTO account (account_number, balance) + VALUES + (101, 5000.00), + (102, 2000.00) + ON CONFLICT (account_number) DO NOTHING; + + 
INSERT INTO account_changes (account_number, amount, remark) + VALUES + (101, 5000.00, 'Initial deposit'), + (102, 2000.00, 'Initial deposit'); + `; + + try { + await client.connect(); + await client.query(insertSql); + console.log("Sample data inserted into 'account' and 'account_changes'."); + } catch (err) { + console.error("Error inserting data:", err); + } finally { + await client.end(); + } +} + +main(); diff --git a/Week3/homework/mongodb/index.js b/Week3/homework/mongodb/index.js index 41ee8b618..f942c4649 100644 --- a/Week3/homework/mongodb/index.js +++ b/Week3/homework/mongodb/index.js @@ -1,3 +1,5 @@ +require("dotenv").config(); + const { MongoClient, ServerApiVersion } = require("mongodb"); const { seedDatabase } = require("./seedDatabase.js"); @@ -12,6 +14,17 @@ async function createEpisodeExercise(client) { */ // Write code that will add this to the collection! + const collection = client + .db("databaseweek3") + .collection("bob_ross_episodes"); + + const newEpisode = { + episode: "S09E13", + title: "MOUNTAIN HIDE-AWAY", + elements: ["CIRRUS", "CLOUDS", "CONIFER", "DECIDIOUS", "GRASS", "MOUNTAIN", "MOUNTAINS", "RIVER", "SNOWY_MOUNTAIN", "TREE", "TREES"], +}; + + const result = await collection.insertOne(newEpisode); console.log( `Created season 9 episode 13 and the document got the id ${"TODO: fill in variable here"}` @@ -23,26 +36,53 @@ async function findEpisodesExercises(client) { * Complete the following exercises. * The comments indicate what to do and what the result should be! 
*/ + const collection = client + .db("databaseweek3") + .collection("bob_ross_episodes"); // Find the title of episode 2 in season 2 [Should be: WINTER SUN] + const s02e02 = await collection.findOne( + { episode: "S02E02" }, + { projection: { title: 1, _id: 0 }} +); + const winterSunTitle = s02e02.title; console.log( `The title of episode 2 in season 2 is ${"TODO: fill in variable here"}` ); // Find the season and episode number of the episode called "BLACK RIVER" [Should be: S02E06] + const blackRiver = await collection.findOne( + { title: "BLACK RIVER" }, + { projection: { episode: 1, _id: 0 } } +); + + const blackRiverCode = blackRiver?.episode; console.log( `The season and episode number of the "BLACK RIVER" episode is ${"TODO: fill in variable here"}` ); // Find all of the episode titles where Bob Ross painted a CLIFF [Should be: NIGHT LIGHT, EVENING SEASCAPE, SURF'S UP, CLIFFSIDE, BY THE SEA, DEEP WILDERNESS HOME, CRIMSON TIDE, GRACEFUL WATERFALL] + const cliffEpisodes = await collection + .find({ elements: "CLIFF" }, { projection: { title: 1, _id: 0 } }) + .toArray(); + const cliffTitles = cliffEpisodes.map((doc) => doc.title).join(", "); console.log( `The episodes that Bob Ross painted a CLIFF are ${"TODO: fill in variable here"}` ); // Find all of the episode titles where Bob Ross painted a CLIFF and a LIGHTHOUSE [Should be: NIGHT LIGHT] + const cliffLighthouseEpisodes = await collection + .find( + { elements: { $all: ["CLIFF", "LIGHTHOUSE"] } }, + { projection: { title: 1, _id: 0 } } + ) + .toArray(); + const cliffLighthouseTitles = cliffLighthouseEpisodes + .map((doc) => doc.title) + .join(", "); console.log( `The episodes that Bob Ross painted a CLIFF and a LIGHTHOUSE are ${"TODO: fill in variable here"} @@ -56,8 +96,15 @@ async function updateEpisodeExercises(client) { * * Note: do NOT change the data.json file */ + const collection = client + .db("databaseweek3") + .collection("bob_ross_episodes"); // Episode 13 in season 30 should be called BLUE
RIDGE FALLS, yet it is called BLUE RIDGE FALLERS now. Fix that + const titleResult = await collection.updateOne( + { episode: "S30E13" }, + { $set: { title: "BLUE RIDGE FALLS" } } + ); console.log( `Ran a command to update episode 13 in season 30 and it updated ${"TODO: fill in variable here"} episodes` ); @@ -66,6 +113,10 @@ // Unfortunately we made a mistake in the arrays and the element type called 'BUSHES' should actually be 'BUSH' as sometimes only one bush was painted. // Update all of the documents in the collection that have `BUSHES` in the elements array to now have `BUSH` // It should update 120 episodes! + const bushesResult = await collection.updateMany( + { elements: "BUSHES" }, + { $set: { "elements.$": "BUSH" } } // change the matched element in the array + ); console.log( `Ran a command to update all the BUSHES to BUSH and it updated ${"TODO: fill in variable here"} episodes` ); @@ -77,6 +128,11 @@ /** * It seems an errand episode has gotten into our data. * This is episode 14 in season 31. Please remove it and verify that it has been removed! */ + const collection = client + .db("databaseweek3") + .collection("bob_ross_episodes"); + + const deleteResult = await collection.deleteOne({ episode: "S31E14" }); console.log( `Ran a command to delete episode and it deleted ${"TODO: fill in variable here"} episodes` ); @@ -89,13 +145,16 @@ async function main() { `You did not set up the environment variables correctly.
Did you create a '.env' file and add a package to create it?` ); } - const client = new MongoClient(process.env.MONGODB_URL, { - useNewUrlParser: true, - useUnifiedTopology: true, - serverApi: ServerApiVersion.v1, - }); - try { + const client = new MongoClient(process.env.MONGODB_URL, { + serverApi: { + version: ServerApiVersion.v1, + strict: true, + deprecationErrors: true, + }, + }); + + try { await client.connect(); // Seed our database diff --git a/Week4/homework/ex1-aggregation/aggregation.js b/Week4/homework/ex1-aggregation/aggregation.js new file mode 100644 index 000000000..b908956cc --- /dev/null +++ b/Week4/homework/ex1-aggregation/aggregation.js @@ -0,0 +1,149 @@ +const { MongoClient } = require("mongodb"); +const fs = require("fs"); +const path = require("path"); + +const uri = "mongodb://localhost:27017"; +const dbName = "databaseweek4"; +const collectionName = "population"; + + +async function withCollection(callback) { + const client = new MongoClient(uri); + + try { + await client.connect(); + const db = client.db(dbName); + const collection = db.collection(collectionName); + return await callback(collection); + } finally { + await client.close(); + } +} + + +async function importCsv() { + return withCollection(async (collection) => { + console.log("Clearing existing population data..."); + await collection.deleteMany({}); + + const csvPath = path.join(__dirname, "population_pyramid_1950-2022.csv"); + const csvText = fs.readFileSync(csvPath, "utf-8"); + + const lines = csvText.split("\n").slice(1); + const docs = []; + + for (const rawLine of lines) { + const line = rawLine.trim(); + if (!line) continue; + + const [country, yearStr, age, mStr, fStr] = line.split(","); + + docs.push({ + Country: country.trim(), + Year: Number(yearStr), + Age: age.trim(), + M: Number(mStr), + F: Number(fStr), + }); + } + + if (docs.length === 0) { + console.warn("No data found in CSV – check the file."); + return; + } + + await collection.insertMany(docs); + 
console.log(`Inserted ${docs.length} population documents.`); + }); +} + + +async function getTotalPopulationPerYear(countryName) { + return withCollection(async (collection) => { + const results = await collection + .aggregate([ + { $match: { Country: countryName } }, + { + $group: { + _id: "$Year", + countPopulation: { + $sum: { $add: ["$M", "$F"] }, + }, + }, + }, + { $sort: { _id: 1 } }, + ]) + .toArray(); + + return results; + }); +} + + +async function getContinentsByYearAndAge(year, age) { + const continents = [ + "AFRICA", + "ASIA", + "EUROPE", + "LATIN AMERICA AND THE CARIBBEAN", + "NORTHERN AMERICA", + "OCEANIA", + ]; + + return withCollection(async (collection) => { + const results = await collection + .aggregate([ + { + $match: { + Year: year, + Age: age, + Country: { $in: continents }, + }, + }, + { + + $group: { + _id: "$Country", + Country: { $first: "$Country" }, + Year: { $first: "$Year" }, + Age: { $first: "$Age" }, + M: { $sum: "$M" }, + F: { $sum: "$F" }, + }, + }, + { + $addFields: { + TotalPopulation: { $add: ["$M", "$F"] }, + }, + }, + { $sort: { Country: 1 } }, + ]) + .toArray(); + + return results; + }); +} + + +async function main() { + console.log("=== Importing CSV into MongoDB ==="); + await importCsv(); + + console.log("\n=== Exercise 1.2 – Netherlands total population per year ==="); + const nl = await getTotalPopulationPerYear("Netherlands"); + console.log(JSON.stringify(nl, null, 2)); + + console.log('\n=== Exercise 1.3 – Continents (Year: 2020, Age: "100+") ==='); + const continents = await getContinentsByYearAndAge(2020, "100+"); + console.log(JSON.stringify(continents, null, 2)); +} + +module.exports = { + importCsv, + getTotalPopulationPerYear, + getContinentsByYearAndAge, +}; + +if (require.main === module) { + main().catch(console.error); +} diff --git a/Week4/homework/ex1-aggregation/output1.png b/Week4/homework/ex1-aggregation/output1.png new file mode 100644 index 000000000..1d90928c1 Binary files /dev/null and 
b/Week4/homework/ex1-aggregation/output1.png differ diff --git a/Week4/homework/ex1-aggregation/output2.png b/Week4/homework/ex1-aggregation/output2.png new file mode 100644 index 000000000..f37737112 Binary files /dev/null and b/Week4/homework/ex1-aggregation/output2.png differ diff --git a/Week4/homework/ex2-transactions/setup.js b/Week4/homework/ex2-transactions/setup.js new file mode 100644 index 000000000..ef4540b81 --- /dev/null +++ b/Week4/homework/ex2-transactions/setup.js @@ -0,0 +1,80 @@ +const { MongoClient } = require("mongodb"); + +const uri = "mongodb://localhost:27017"; +const dbName = "databaseweek4"; +const collectionName = "accounts"; + +async function setupAccounts() { + const client = new MongoClient(uri); + + try { + await client.connect(); + console.log("Connected to MongoDB for setup"); + + const db = client.db(dbName); + const collection = db.collection(collectionName); + + + await collection.deleteMany({}); + console.log("Cleared existing accounts"); + + const now = new Date(); + + + const accounts = [ + { + account_number: 101, + balance: 5000, + account_changes: [ + { + change_number: 1, + amount: 5000, + changed_date: now, + remark: "Initial deposit", + }, + ], + }, + { + account_number: 102, + balance: 3000, + account_changes: [ + { + change_number: 1, + amount: 3000, + changed_date: now, + remark: "Initial deposit", + }, + ], + }, + { + account_number: 103, + balance: 7500, + account_changes: [ + { + change_number: 1, + amount: 7500, + changed_date: now, + remark: "Initial deposit", + }, + ], + }, + ]; + + await collection.insertMany(accounts); + console.log(`Created ${accounts.length} accounts`); + + const allAccounts = await collection.find({}).toArray(); + console.log("\nAccounts created:"); + allAccounts.forEach((acc) => { + console.log(`Account ${acc.account_number}: Balance = ${acc.balance}`); + }); + } finally { + await client.close(); + } +} + +module.exports = { setupAccounts }; + +if (require.main === module) { + 
const { MongoClient } = require("mongodb");

const uri = "mongodb://localhost:27017";
const dbName = "databaseweek4";
const collectionName = "accounts";

/**
 * Atomically moves `amount` from one account to another inside a MongoDB
 * transaction (NOTE: transactions require the server to run as a replica
 * set or sharded cluster — a standalone mongod will reject them).
 *
 * @param {number} fromAccount - account_number of the sending document
 * @param {number} toAccount - account_number of the receiving document
 * @param {number} amount - positive, finite amount to move
 * @param {string} remark - free-text note stored on both account_changes entries
 * @throws {Error} when the amount is invalid, either account is missing,
 *   the sender's balance is insufficient, or an update fails; the
 *   transaction is aborted so no document is modified.
 */
async function transfer(fromAccount, toAccount, amount, remark) {
  // Fail fast on bad input before opening a connection. The original
  // `amount <= 0` check let NaN through (NaN <= 0 is false), which would
  // have corrupted both balances via $inc.
  if (!Number.isFinite(amount) || amount <= 0) {
    throw new Error("Amount must be positive");
  }

  const client = new MongoClient(uri);

  try {
    await client.connect();
    const db = client.db(dbName);
    const accounts = db.collection(collectionName);

    const session = client.startSession();

    try {
      // withTransaction retries transient errors and aborts on any throw,
      // leaving both accounts untouched on failure.
      await session.withTransaction(async () => {
        const [from, to] = await Promise.all([
          accounts.findOne({ account_number: fromAccount }, { session }),
          accounts.findOne({ account_number: toAccount }, { session }),
        ]);

        if (!from) throw new Error(`Account ${fromAccount} not found`);
        if (!to) throw new Error(`Account ${toAccount} not found`);
        if (from.balance < amount) {
          throw new Error(`Insufficient balance in account ${fromAccount}`);
        }

        // Highest change_number across ALL accounts; both sides of this
        // transfer share the next number, so it doubles as a transfer id.
        const [latest] = await accounts
          .aggregate(
            [
              {
                $unwind: {
                  path: "$account_changes",
                  preserveNullAndEmptyArrays: true,
                },
              },
              {
                $group: {
                  _id: null,
                  maxChange: { $max: "$account_changes.change_number" },
                },
              },
            ],
            { session }
          )
          .toArray();

        // ?? (not ||): fall back to 0 only when no change exists at all.
        const nextChangeNumber = (latest?.maxChange ?? 0) + 1;
        const now = new Date();

        // Builds the update document for one side of the transfer;
        // signedAmount is negative for the sender, positive for the receiver.
        const makeUpdate = (signedAmount) => ({
          $inc: { balance: signedAmount },
          $push: {
            account_changes: {
              change_number: nextChangeNumber,
              amount: signedAmount,
              changed_date: now,
              remark,
            },
          },
        });

        // Sequential on purpose: a ClientSession must not be used by
        // concurrent operations.
        const senderResult = await accounts.updateOne(
          { account_number: fromAccount },
          makeUpdate(-amount),
          { session }
        );

        const receiverResult = await accounts.updateOne(
          { account_number: toAccount },
          makeUpdate(amount),
          { session }
        );

        if (
          senderResult.modifiedCount !== 1 ||
          receiverResult.modifiedCount !== 1
        ) {
          throw new Error("Transfer failed to update one or both accounts");
        }
      });

      console.log(
        `Transfer successful: ${amount} from ${fromAccount} to ${toAccount}`
      );

      // Print the final state of both accounts for easy verification.
      const updated = await accounts
        .find(
          { account_number: { $in: [fromAccount, toAccount] } },
          { projection: { _id: 0 } }
        )
        .toArray();

      console.log("Updated accounts:");
      console.log(JSON.stringify(updated, null, 2));
    } finally {
      await session.endSession();
    }
  } finally {
    await client.close();
  }
}

// Run a demo transfer when invoked directly: node transfer.js
if (require.main === module) {
  transfer(101, 102, 1000, "Payment for services").catch((err) => {
    console.error("Transfer error:", err.message);
    process.exit(1);
  });
}

module.exports = { transfer };
"dependencies": { + "sparse-bitfield": "^3.0.3" + } + }, + "node_modules/@types/webidl-conversions": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz", + "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==", + "license": "MIT" + }, + "node_modules/@types/whatwg-url": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-13.0.0.tgz", + "integrity": "sha512-N8WXpbE6Wgri7KUSvrmQcqrMllKZ9uxkYWMt+mCSGwNc0Hsw9VQTW7ApqI4XNrx6/SaM2QQJCzMPDEXE058s+Q==", + "license": "MIT", + "dependencies": { + "@types/webidl-conversions": "*" + } + }, + "node_modules/bson": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/bson/-/bson-7.0.0.tgz", + "integrity": "sha512-Kwc6Wh4lQ5OmkqqKhYGKIuELXl+EPYSCObVE6bWsp1T/cGkOCBN0I8wF/T44BiuhHyNi1mmKVPXk60d41xZ7kw==", + "license": "Apache-2.0", + "engines": { + "node": ">=20.19.0" + } + }, + "node_modules/dotenv": { + "version": "17.2.3", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz", + "integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/memory-pager": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", + "license": "MIT" + }, + "node_modules/mongodb": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-7.0.0.tgz", + "integrity": "sha512-vG/A5cQrvGGvZm2mTnCSz1LUcbOPl83hfB6bxULKQ8oFZauyox/2xbZOoGNl+64m8VBrETkdGCDBdOsCr3F3jg==", + "license": "Apache-2.0", + "dependencies": { + "@mongodb-js/saslprep": "^1.3.0", + "bson": "^7.0.0", + 
"mongodb-connection-string-url": "^7.0.0" + }, + "engines": { + "node": ">=20.19.0" + }, + "peerDependencies": { + "@aws-sdk/credential-providers": "^3.806.0", + "@mongodb-js/zstd": "^7.0.0", + "gcp-metadata": "^7.0.1", + "kerberos": "^7.0.0", + "mongodb-client-encryption": ">=7.0.0 <7.1.0", + "snappy": "^7.3.2", + "socks": "^2.8.6" + }, + "peerDependenciesMeta": { + "@aws-sdk/credential-providers": { + "optional": true + }, + "@mongodb-js/zstd": { + "optional": true + }, + "gcp-metadata": { + "optional": true + }, + "kerberos": { + "optional": true + }, + "mongodb-client-encryption": { + "optional": true + }, + "snappy": { + "optional": true + }, + "socks": { + "optional": true + } + } + }, + "node_modules/mongodb-connection-string-url": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-7.0.0.tgz", + "integrity": "sha512-irhhjRVLE20hbkRl4zpAYLnDMM+zIZnp0IDB9akAFFUZp/3XdOfwwddc7y6cNvF2WCEtfTYRwYbIfYa2kVY0og==", + "license": "Apache-2.0", + "dependencies": { + "@types/whatwg-url": "^13.0.0", + "whatwg-url": "^14.1.0" + }, + "engines": { + "node": ">=20.19.0" + } + }, + "node_modules/pg": { + "version": "8.16.3", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz", + "integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==", + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.9.1", + "pg-pool": "^3.10.1", + "pg-protocol": "^1.10.3", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.2.7" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz", + "integrity": 
"sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.9.1", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.1.tgz", + "integrity": "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.1.tgz", + "integrity": "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz", + "integrity": "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": 
"sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/sparse-bitfield": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", + "integrity": 
"sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", + "license": "MIT", + "dependencies": { + "memory-pager": "^1.0.2" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/tr46": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", + "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", + "license": "MIT", + "dependencies": { + "punycode": "^2.3.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/whatwg-url": { + "version": "14.2.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", + "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", + "license": "MIT", + "dependencies": { + "tr46": "^5.1.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 000000000..4008348a2 --- /dev/null +++ b/package.json @@ -0,0 +1,25 @@ +{ + "name": 
"databases-cohort54", + "version": "1.0.0", + "description": "> If you are following the HackYourFuture curriculum we recommend you to start with module\r > 1: [CLI/GIT](https://github.com/HackYourFuture/CLI-Git). To get a complete overview of the HackYourFuture\r > curriculum first, click [here](https://github.com/HackYourFuture/curriculum).", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/hkarimx/databases-cohort54.git" + }, + "keywords": [], + "author": "", + "license": "ISC", + "bugs": { + "url": "https://github.com/hkarimx/databases-cohort54/issues" + }, + "homepage": "https://github.com/hkarimx/databases-cohort54#readme", + "dependencies": { + "dotenv": "^17.2.3", + "mongodb": "^7.0.0", + "pg": "^8.16.3" + } +}