From 5ebef26e9917d36a1a29942fbd367b5b7bc2fba3 Mon Sep 17 00:00:00 2001 From: mahtabmardani88 Date: Tue, 6 Aug 2024 13:30:17 +0200 Subject: [PATCH 1/5] mahtab mardani update week1 database assignment --- Week1/meetup_DataBase.js | 124 +++++++++++++++++++++++++++++++++++++++ Week1/recipe_DataBase.js | 38 ++++++++++++ Week1/world_queries.js | 44 ++++++++++++++ package-lock.json | 105 +++++++++++++++++++++++++++++++++ package.json | 5 ++ 5 files changed, 316 insertions(+) create mode 100644 Week1/meetup_DataBase.js create mode 100644 Week1/recipe_DataBase.js create mode 100644 Week1/world_queries.js create mode 100644 package-lock.json create mode 100644 package.json diff --git a/Week1/meetup_DataBase.js b/Week1/meetup_DataBase.js new file mode 100644 index 000000000..c45df2120 --- /dev/null +++ b/Week1/meetup_DataBase.js @@ -0,0 +1,124 @@ +const util = require("util"); +const mysql = require("mysql"); + +const connection = mysql.createConnection({ + host: "localhost", + user: "hyfuser", + password: "hyfpassword", + database: 'meetup' +}); + +const executeQuery = util.promisify(connection.query.bind(connection)); + +async function seedDatabase() { + const CREATE_DATABASE = `CREATE DATABASE IF NOT EXISTS meetup`; + const USE_DATABASE = `USE meetup`; + + const CREATE_INVITEE_TABLE = ` + CREATE TABLE IF NOT EXISTS Invitee + ( + invitee_no INT AUTO_INCREMENT , + invitee_name VARCHAR(50), + invited_by VARCHAR(50), + PRIMARY KEY(invitee_no) + );`; + + const CREATE_ROOM_TABLE = ` + CREATE TABLE IF NOT EXISTS Room + ( + room_no INT AUTO_INCREMENT , + room_name VARCHAR(50), + floor_number INT, + PRIMARY KEY(room_no) + );`; + + const CREATE_MEETING_TABLE = ` + CREATE TABLE IF NOT EXISTS Meeting + ( + meeting_no INT AUTO_INCREMENT, + meeting_title VARCHAR(50), + starting_time DATETIME, + ending_time DATETIME, + room_no INT, + PRIMARY KEY(meeting_no), + FOREIGN KEY(room_no) REFERENCES Room(room_no) + );`; + + const invitees = [ + { invitee_name: "Alice", invited_by: "Bob" }, + { invitee_name: "Bob", invited_by: "Charlie" }, + { invitee_name: "Charlie", invited_by: "Alice" }, + { invitee_name: "Dave", invited_by: "Eve" }, + { invitee_name: "Eve", invited_by: "Dave" } + ]; + + const rooms = [ + { room_name: "Conference Room A", floor_number: 1 }, + { room_name: "Conference Room B", floor_number: 2 }, + { room_name: "Conference Room C", floor_number: 3 }, + { room_name: "Conference Room D", floor_number: 4 }, + { room_name: "Conference Room E", floor_number: 5 } + ]; + + const meetings = [ + { + meeting_title: "Project Kickoff", + starting_time: "2024-01-01 10:00:00", + ending_time: "2024-01-01 11:00:00", + room_no: 1 + }, + { + meeting_title: "Design Review", + starting_time: "2024-01-02 14:00:00", + ending_time: "2024-01-02 15:00:00", + room_no: 2 + }, + { + meeting_title: "Sprint Planning", + starting_time: "2024-01-03 09:00:00", + ending_time: "2024-01-03 10:00:00", + room_no: 3 + }, + { + meeting_title: "Team Building", + starting_time: "2024-01-04 16:00:00", + ending_time: "2024-01-04 17:00:00", + room_no: 4 + }, + { + meeting_title: "Retrospective", + starting_time: "2024-01-05 11:00:00", + ending_time: "2024-01-05 12:00:00", + room_no: 5 + } + ]; + + connection.connect(); + try { + await executeQuery(CREATE_DATABASE); + await executeQuery(USE_DATABASE); + await Promise.all([ + executeQuery(CREATE_INVITEE_TABLE), + executeQuery(CREATE_ROOM_TABLE), + executeQuery(CREATE_MEETING_TABLE) + ]); + await Promise.all( + invitees.map((invitee) => + executeQuery("INSERT INTO Invitee SET ?", invitee) + ) + ); + 
await Promise.all( + rooms.map((room) => executeQuery("INSERT INTO Room SET ?", room)) + ); + await Promise.all( + meetings.map((meeting) => + executeQuery("INSERT INTO Meeting SET ?", meeting) + ) + ); + } catch (error) { + console.error(error); + } + + connection.end(); +} +seedDatabase(); diff --git a/Week1/recipe_DataBase.js b/Week1/recipe_DataBase.js new file mode 100644 index 000000000..172be78fc --- /dev/null +++ b/Week1/recipe_DataBase.js @@ -0,0 +1,38 @@ +const mysql = require("mysql"); +const connection = mysql.createConnection({ + host: "localhost", + user: "hyfuser", + password: "hyfpassword", + database: "recipe_db", + port: 3306 +}); +connection.connect(); + +const create_dataBase_query = "CREATE DATABASE IF NOT EXISTS recipe_db"; +connection.query(create_dataBase_query, function (error, results, fields) { + if (error) throw error; + console.log("Database 'recipe_db' created or already exists."); + + const use_database_query = "USE recipe_db"; + connection.query(use_database_query, function (error, results, fields) { + console.log("Using 'recipe_db' database."); + + const CREATE_TABLE = [ + "CREATE TABLE IF NOT EXISTS Recipe (recipe_id INT AUTO_INCREMENT, recipe_name VARCHAR(255) NOT NULL, PRIMARY KEY(recipe_id))", + "CREATE TABLE IF NOT EXISTS Category (category_id INT AUTO_INCREMENT, category_name VARCHAR(255) NOT NULL, PRIMARY KEY(category_id))", + "CREATE TABLE IF NOT EXISTS Ingredient (ingredient_id INT AUTO_INCREMENT, ingredient_name VARCHAR(255) NOT NULL, PRIMARY KEY(ingredient_id))", + "CREATE TABLE IF NOT EXISTS Step (step_id INT AUTO_INCREMENT, step_description TEXT NOT NULL, PRIMARY KEY(step_id))", + "CREATE TABLE IF NOT EXISTS RecipeCategory (recipe_id INT, category_id INT, PRIMARY KEY(recipe_id, category_id), FOREIGN KEY(recipe_id) REFERENCES Recipe(recipe_id), FOREIGN KEY(category_id) REFERENCES Category(category_id))", + "CREATE TABLE IF NOT EXISTS RecipeIngredient (recipe_id INT, ingredient_id INT, PRIMARY KEY(recipe_id, ingredient_id), FOREIGN KEY(recipe_id) REFERENCES Recipe(recipe_id), FOREIGN KEY(ingredient_id) REFERENCES Ingredient(ingredient_id))", + "CREATE TABLE IF NOT EXISTS RecipeStep (recipe_id INT, step_id INT, step_order INT, PRIMARY KEY(recipe_id, step_id), FOREIGN KEY(recipe_id) REFERENCES Recipe(recipe_id), FOREIGN KEY(step_id) REFERENCES Step(step_id))" + ]; + + CREATE_TABLE.forEach((query) => { + connection.query(query, function (error, results, fields) { + if (error) throw error; + console.log(`Table created or already exists.`); + }); + }); + connection.end(); + }); +}); diff --git a/Week1/world_queries.js b/Week1/world_queries.js new file mode 100644 index 000000000..185b69f2a --- /dev/null +++ b/Week1/world_queries.js @@ -0,0 +1,44 @@ +const mysql = require('mysql'); +const util = require('util'); + +const connection = mysql.createConnection({ + host: 'localhost', + user: 'hyfuser', + password: 'hyfpassword', + database: 'world', +}); + +const execute_Query = util.promisify(connection.query.bind(connection)); + +async function executeQueries(){ + try{ + const countriesPopulation = await execute_Query('SELECT name FROM country WHERE population > 8000000' ); + const countriesLand = await execute_Query("SELECT name FROM country WHERE name LIKE '%land%'"); + const citiesPopulation = await execute_Query('SELECT name FROM city WHERE population BETWEEN 500000 AND 1000000'); + const europeCountries = await execute_Query("SELECT name FROM country WHERE continent = 'Europe'"); + const countriesBySurfaceArea = await execute_Query('SELECT name FROM 
country ORDER BY surfaceArea DESC'); + const citiesNetherlands = await execute_Query("SELECT name FROM city WHERE countryCode = 'NLD'"); + const populationRotterdam = await execute_Query("SELECT population FROM city WHERE name = 'Rotterdam'"); + const top10CountriesBySurfaceArea = await execute_Query('SELECT name FROM country ORDER BY surfaceArea DESC LIMIT 10'); + const top10MostPopulatedCities = await execute_Query('SELECT name FROM city ORDER BY population DESC LIMIT 10'); + const worldPopulation = await execute_Query('SELECT SUM(population) AS world_population FROM country'); + + console.log('Countries with population greater than 8 million:', countriesPopulation); + console.log('Countries with "land" in their names:', countriesLand); + console.log('Cities with population between 500,000 and 1 million:', citiesPopulation); + console.log('All countries in Europe:', europeCountries); + console.log('All countries ordered by surface area:', countriesBySurfaceArea); + console.log('All cities in the Netherlands:', citiesNetherlands); + console.log('Population of Rotterdam:', populationRotterdam); + console.log('Top 10 countries by surface area:', top10CountriesBySurfaceArea); + console.log('Top 10 most populated cities:', top10MostPopulatedCities); + console.log('Total world population:', worldPopulation); + } + catch(error){ + console.error(error); + } finally { + connection.end(); + } +} + +executeQueries(); \ No newline at end of file diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..cc9bb8ea4 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,105 @@ +{ + "name": "databases-cohort48", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "mysql": "^2.18.1" + } + }, + "node_modules/bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "license": "MIT" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, + "node_modules/mysql": { + "version": "2.18.1", + "resolved": "https://registry.npmjs.org/mysql/-/mysql-2.18.1.tgz", + "integrity": "sha512-Bca+gk2YWmqp2Uf6k5NFEurwY/0td0cpebAucFpY/3jhrwrVGuxU2uQFCHjU19SJfje0yQvi+rVWdq78hR5lig==", + "license": "MIT", + "dependencies": { + "bignumber.js": "9.0.0", + "readable-stream": "2.3.7", + "safe-buffer": "5.1.2", + "sqlstring": "2.3.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": 
"sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "license": "MIT" + }, + "node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/sqlstring": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.1.tgz", + "integrity": "sha512-ooAzh/7dxIG5+uDik1z/Rd1vli0+38izZhGzSa34FwR7IbelPWCCKSNIl8jlL/F7ERvy8CB2jNeM1E9i9mXMAQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 000000000..49ed5f993 --- /dev/null +++ b/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "mysql": "^2.18.1" + } +} From cc88d6fcdf373c18018b2b4940bc4ad02215c298 Mon Sep 17 00:00:00 2001 From: mahtabmardani88 Date: Fri, 9 Aug 2024 20:27:44 +0200 Subject: [PATCH 2/5] mahtab mardani week2 databases --- .vscode/settings.json | 8 + Week1/database_diagram.drawio | 244 ++++++++++++++++++++ Week1/recipe_DataBase.js | 7 +- Week2/database_diagram_week2.drawio | 337 ++++++++++++++++++++++++++++ Week2/exercise.js | 197 ++++++++++++++++ Week2/recipe_DataBase_week2.js | 164 ++++++++++++++ 6 files changed, 955 insertions(+), 2 deletions(-) create mode 100644 .vscode/settings.json create mode 100644 Week1/database_diagram.drawio create mode 100644 Week2/database_diagram_week2.drawio create mode 100644 Week2/exercise.js create mode 100644 Week2/recipe_DataBase_week2.js diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..f51be4cfd --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,8 @@ +{ + "cSpell.words": [ + "hyfpassword", + "hyfuser", + "recipestep", + "Tamagoyaki" + ] +} \ No newline at end of file diff --git a/Week1/database_diagram.drawio b/Week1/database_diagram.drawio new file mode 100644 index 000000000..891b65f4e --- /dev/null +++ b/Week1/database_diagram.drawio @@ -0,0 +1,244 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Week1/recipe_DataBase.js b/Week1/recipe_DataBase.js index 172be78fc..464f79f80 100644 --- a/Week1/recipe_DataBase.js +++ b/Week1/recipe_DataBase.js @@ -8,6 +8,7 @@ const connection = mysql.createConnection({ }); connection.connect(); +const createDatabaseAndTables = () => { const create_dataBase_query = "CREATE DATABASE IF NOT EXISTS recipe_db"; connection.query(create_dataBase_query, function (error, results, fields) { if (error) throw error; @@ -17,7 +18,7 @@ connection.query(create_dataBase_query, function (error, results, fields) { connection.query(use_database_query, function (error, results, fields) { console.log("Using 'recipe_db' database."); - const CREATE_TABLE = [ + const CREATE_TABLES = [ "CREATE TABLE IF NOT EXISTS Recipe (recipe_id INT AUTO_INCREMENT, recipe_name VARCHAR(255) NOT NULL, PRIMARY KEY(recipe_id))", "CREATE TABLE IF NOT EXISTS Category (category_id INT AUTO_INCREMENT, category_name VARCHAR(255) NOT NULL, PRIMARY KEY(category_id))", "CREATE TABLE IF NOT EXISTS Ingredient (ingredient_id INT AUTO_INCREMENT, ingredient_name VARCHAR(255) NOT NULL, PRIMARY KEY(ingredient_id))", @@ -27,7 +28,7 @@ connection.query(create_dataBase_query, function (error, results, fields) { "CREATE TABLE IF NOT EXISTS RecipeStep (recipe_id INT, step_id INT, step_order INT, PRIMARY KEY(recipe_id, step_id), FOREIGN KEY(recipe_id) REFERENCES Recipe(recipe_id), FOREIGN KEY(step_id) REFERENCES Step(step_id))" ]; - CREATE_TABLE.forEach((query) => { + CREATE_TABLES.forEach((query) => { connection.query(query, function (error, results, fields) { if (error) throw error; console.log(`Table created or already exists.`); @@ -36,3 +37,5 @@ connection.query(create_dataBase_query, function (error, results, fields) { connection.end(); }); }); +}; +module.exports = {createDatabaseAndTables , connection}; \ No newline at end of file diff --git a/Week2/database_diagram_week2.drawio b/Week2/database_diagram_week2.drawio new file mode 100644 index 000000000..dec9a7876 --- /dev/null +++ b/Week2/database_diagram_week2.drawio @@ -0,0 +1,337 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Week2/exercise.js b/Week2/exercise.js new file mode 100644 index 000000000..44cbf6a0f --- /dev/null +++ b/Week2/exercise.js @@ -0,0 +1,197 @@ +const mysql = require('mysql'); + +const connection = mysql.createConnection({ + host: 'localhost', + user: 'hyfuser', + password: 'hyfpassword', + database: 'author_db', +}); + +connection.connect((err) => { + if (err) throw err; + console.log('Connected to mysql server.'); + connection.query('CREATE DATABASE IF NOT EXISTS author_db', (error, results) => { + if (error) throw error; + console.log('Database "author_db" 
created or already exists.'); + + connection.changeUser({database: 'author_db'}, (err) => { + if (err) throw err; + console.log('Using "author_db" database.'); + createTables(); + }); + }); +}); + +function createTables() { + const AUTHORS = ` + CREATE TABLE IF NOT EXISTS authors ( + author_id INT AUTO_INCREMENT PRIMARY KEY, + author_name VARCHAR(255), + university VARCHAR(255), + date_of_birth DATE, + h_index INT, + gender ENUM('male', 'female', 'other'), + mentor INT, + FOREIGN KEY (mentor) REFERENCES authors(author_id) ON DELETE SET NULL + ); + `; + connection.query(AUTHORS, (error, results, fields) => { + if (error) throw error; + console.log('Table "authors" created or already exists.'); + createResearchPapersTable(); + }); +} + +function createResearchPapersTable() { + const RESEARCH_PAPERS = ` + CREATE TABLE IF NOT EXISTS research_papers ( + paper_id INT AUTO_INCREMENT PRIMARY KEY, + paper_title VARCHAR(255), + conference VARCHAR(255), + publish_date DATE, + author_id INT, + FOREIGN KEY (author_id) REFERENCES authors(author_id) ON DELETE CASCADE + ); + `; + connection.query(RESEARCH_PAPERS, (error, results, fields) => { + if (error) throw error; + console.log('Table "research_papers" created or already exists.'); + insertSampleData(); + }); +} + +function insertSampleData() { + let authors = []; + let papers = []; + + for (let i = 1; i <= 15; i++) { + const year = 1980 + i; + const author = [ + `Author ${i}`, + `University ${String.fromCharCode(64 + (i % 3) + 1)}`, + `${year}-01-01`, + Math.floor(Math.random() * 20) + 1, + i % 2 === 0 ? 'male' : 'female', + i > 1 ? Math.floor(Math.random() * (i - 1)) + 1 : null + ]; + authors.push(author); + } + + for (let i = 1; i <= 30; i++) { + const paper = [ + `Paper ${i}`, + `Conference ${String.fromCharCode(64 + (i % 3) + 1)}`, + `2022-01-${(i % 30) + 1}`, + Math.floor(Math.random() * 15) + 1 + ]; + papers.push(paper); + } + + const INSERT_AUTHORS = ` + INSERT INTO authors (author_name, university, date_of_birth, h_index, gender, mentor) + VALUES ?; + `; + connection.query(INSERT_AUTHORS, [authors], (error, results, fields) => { + if (error) throw error; + console.log('Sample data inserted into "authors" table.'); + insertResearchPapers(papers); + }); +} + +function insertResearchPapers(papers) { + const INSERT_RESEARCH_PAPERS = ` + INSERT INTO research_papers (paper_title, conference, publish_date, author_id) + VALUES ?; + `; + connection.query(INSERT_RESEARCH_PAPERS, [papers], (error, results, fields) => { + if (error) throw error; + console.log('Sample data inserted into "research_papers" table.'); + runQueries(); + }); +} + +function runQueries() { + const QUERY_AUTHORS_MENTORS = ` + SELECT a1.author_name AS author, a2.author_name AS mentor + FROM authors a1 + LEFT JOIN authors a2 ON a1.mentor = a2.author_id; + `; + + connection.query(QUERY_AUTHORS_MENTORS, (error, results, fields) => { + if (error) throw error; + console.log('Authors and their corresponding mentors:', results); + }); + + const QUERY_AUTHORS_PAPERS = ` + SELECT authors.author_name, authors.university, research_papers.paper_title + FROM authors + LEFT JOIN research_papers ON authors.author_id = research_papers.author_id; + `; + + connection.query(QUERY_AUTHORS_PAPERS, (error, results, fields) => { + if (error) throw error; + console.log('Authors and their published papers:', results); + }); + + const QUERY_PAPER_AUTHOR_COUNT = ` + SELECT paper_title, COUNT(author_id) AS author_count + FROM research_papers + GROUP BY paper_title; + `; + + 
connection.query(QUERY_PAPER_AUTHOR_COUNT, (error, results, fields) => { + if (error) throw error; + console.log('Number of authors per paper:', results); + }); + + const QUERY_FEMALE_PAPER_SUM = ` + SELECT SUM(rp.paper_count) AS female_paper_sum + FROM ( + SELECT author_id, COUNT(*) AS paper_count + FROM research_papers + GROUP BY author_id + ) rp + JOIN authors ON rp.author_id = authors.author_id + WHERE authors.gender = 'female'; + `; + + connection.query(QUERY_FEMALE_PAPER_SUM, (error, results, fields) => { + if (error) throw error; + console.log('Sum of research papers by female authors:', results); + }); + + const QUERY_AVG_H_INDEX_PER_UNIVERSITY = ` + SELECT university, AVG(h_index) AS avg_h_index + FROM authors + GROUP BY university; + `; + + connection.query(QUERY_AVG_H_INDEX_PER_UNIVERSITY, (error, results, fields) => { + if (error) throw error; + console.log('Average h-index per university:', results); + }); + + const QUERY_SUM_PAPERS_PER_UNIVERSITY = ` + SELECT university, COUNT(research_papers.paper_id) AS paper_count + FROM authors + LEFT JOIN research_papers ON authors.author_id = research_papers.author_id + GROUP BY university; + `; + + connection.query(QUERY_SUM_PAPERS_PER_UNIVERSITY, (error, results, fields) => { + if (error) throw error; + console.log('Sum of research papers per university:', results); + }); + + const QUERY_MIN_MAX_H_INDEX_PER_UNIVERSITY = ` + SELECT university, MIN(h_index) AS min_h_index, MAX(h_index) AS max_h_index + FROM authors + GROUP BY university; + `; + + connection.query(QUERY_MIN_MAX_H_INDEX_PER_UNIVERSITY, (error, results, fields) => { + if (error) throw error; + console.log('Min and max h-index per university:', results); + connection.end(); + }); +} diff --git a/Week2/recipe_DataBase_week2.js b/Week2/recipe_DataBase_week2.js new file mode 100644 index 000000000..91c75266f --- /dev/null +++ b/Week2/recipe_DataBase_week2.js @@ -0,0 +1,164 @@ +const {createDatabaseAndTables,connection} = require("../Week1/recipe_DataBase.js"); + +if (!connection._connectCalled) { + connection.connect((err) => { + if (err) throw err; + console.log("Connected to the database."); + runQueries(); + }); +} else { + runQueries(); +} + +function runQueries() { + createDatabaseAndTables(() => { + + const ADD_TYPE_COLUMN_QUERY = ` + ALTER TABLE Recipe + ADD COLUMN type ENUM + ( + 'vegetarian', + 'vegan', + 'non-vegetarian' + ) NOT NULL AFTER recipe_name;`; + + connection.query(ADD_TYPE_COLUMN_QUERY, function (error, results, fields) { + if (error) throw error; + console.log("Type column added to Recipe table."); + + + + // add categories to the table => Category + const ADD_CATEGORIES_QUERY = ` + INSERT INTO Category (category_name) VALUES + ('Cake'), + ('No-Bake'), + ('Vegetarian'), + ('Vegan'), + ('Gluten-Free'), + ('Japanese');`; + + connection.query(ADD_CATEGORIES_QUERY, function (error, results, fields) { + if (error) throw error; + console.log("Categories added to Category table."); + + + + //add materials to the table => Ingredient + const ADD_INGREDIENTS_QUERY = ` + INSERT INTO Ingredient (ingredient_name) VALUES + ('Condensed milk'), + ('Cream Cheese'), + ('Lemon Juice'), + ('Pie Crust'), + ('Cherry Jam'), + ('Brussels Sprouts'), + ('Butter'), + ('Flour'), + ('Salt'), + ('Pepper'), + ('Milk'), + ('Shredded Cheddar cheese'), + ('Macaroni'), + ('Eggs'), + ('Soy sauce'), + ('Sugar'), + ('Olive Oil');`; + + connection.query(ADD_INGREDIENTS_QUERY, function (error, results, fields) { + if (error) throw error; + console.log("Ingredients added to Ingredient table."); 
+ + + + // sort of the materials and categories and add commands to the table => recipe + const ADD_RECIPES_QUERY = ` + INSERT INTO Recipe (recipe_name, type) VALUES + ('No-Bake Cheesecake', 'vegetarian'), + ('Roasted Brussels Sprouts', 'vegan'), + ('Mac & Cheese', 'vegetarian'), + ('Tamagoyaki Japanese Omelette', 'vegetarian'); + + INSERT INTO RecipeCategory (recipe_id, category_id) VALUES + (1, 1), (1, 2), (1, 3), -- No-Bake Cheesecake with Cake, No-Bake, Vegetarian + (2, 5), (2, 4), -- Roasted Brussels Sprouts with Gluten-Free, Vegan + (3, 3), -- Mac & Cheese with Vegetarian + (4, 3), (4, 6); -- Tamagoyaki Japanese Omelette with Vegetarian, Japanese + + INSERT INTO RecipeIngredient (recipe_id, ingredient_id) VALUES + (1, 1), (1, 2), (1, 3), (1, 4), (1, 5), -- Ingredients for No-Bake Cheesecake + (2, 6), (2, 3), (2, 9), (2, 11), (2, 7), -- Ingredients for Roasted Brussels Sprouts + (3, 13), (3, 7), (3, 8), (3, 9), (3, 12), (3, 10), (3, 11), -- Ingredients for Mac & Cheese + (4, 14), (4, 15), (4, 16), (4, 17); -- Ingredients for Tamagoyaki Japanese Omelette + `; + + connection.query(ADD_RECIPES_QUERY, function (error, results, fields) { + if (error) throw error; + console.log("Recipes and their associations added."); + + // perform query + const VEGETARIAN_POTATO_RECIPES_QUERY = ` + SELECT recipe_name + FROM Recipe + JOIN RecipeIngredient ON Recipe.recipe_id = RecipeIngredient.recipe_id + JOIN Ingredient ON RecipeIngredient.ingredient_id = Ingredient.ingredient_id + WHERE Recipe.type = 'vegetarian' + AND Ingredient.ingredient_name = 'potato';`; + + connection.query(VEGETARIAN_POTATO_RECIPES_QUERY, function (error, results, fields) { + if (error) throw error; + console.log("Vegetarian recipes with potatoes:", results); + + const CAKES_NO_BAKING_QUERY = ` + SELECT recipe_name + FROM Recipe + JOIN RecipeCategory ON Recipe.recipe_id = RecipeCategory.recipe_id + JOIN Category ON RecipeCategory.category_id = Category.category_id + WHERE Category.category_name = 'cake' + AND Recipe.recipe_name LIKE '%no bake%';`; + + connection.query(CAKES_NO_BAKING_QUERY, function (error, results, fields) { + if (error) throw error; + console.log("Cakes that do not need baking:", results); + + const VEGAN_JAPANESE_RECIPES_QUERY = ` + SELECT recipe_name + FROM Recipe + JOIN RecipeCategory ON Recipe.recipe_id = RecipeCategory.recipe_id + JOIN Category ON RecipeCategory.category_id = Category.category_id + WHERE Recipe.type = 'vegan' + OR Category.category_name = 'Japanese';`; + + connection.query(VEGAN_JAPANESE_RECIPES_QUERY, function (error, results, fields) { + if (error) throw error; + console.log("Vegan and Japanese recipes:", results); + + connection.end(); + }); + }); + }); + }); + }); + }); + }); + }); +} + + +// * don't forget +// SELECT +// Recipe.recipe_name, +// Recipe.type, +// Ingredient.ingredient_name, +// Category.category_name, +// Step.step_description +// FROM Recipe +// LEFT JOIN RecipeIngredient ON Recipe.recipe_id = RecipeIngredient.recipe_id +// LEFT JOIN Ingredient ON RecipeIngredient.ingredient_id = Ingredient.ingredient_id +// LEFT JOIN RecipeCategory ON Recipe.recipe_id = RecipeCategory.recipe_id +// LEFT JOIN Category ON RecipeCategory.category_id = Category.category_id +// LEFT JOIN RecipeStep ON Recipe.recipe_id = RecipeStep.recipe_id +// LEFT JOIN Step ON RecipeStep.step_id = Step.step_id; + + +// DESCRIBE recipestep; \ No newline at end of file From 3fbf31099360a29146d6535a2e7a5e6918f6cf74 Mon Sep 17 00:00:00 2001 From: mahtabmardani88 Date: Mon, 12 Aug 2024 14:36:36 +0200 Subject: 
[PATCH 3/5] mahtab mardani update files --- Week2/MYSQL_exercises/create_tables.js | 52 +++++ Week2/MYSQL_exercises/insert_data.js | 64 ++++++ Week2/MYSQL_exercises/queries.js | 93 +++++++++ Week2/{ => QA_PREP_EX}/QA_PREP_EXERCISE.md | 0 .../database_diagram_week2.drawio | 4 +- .../{ => QA_PREP_EX}/recipe_DataBase_week2.js | 45 +--- Week2/exercise.js | 197 ------------------ 7 files changed, 219 insertions(+), 236 deletions(-) create mode 100644 Week2/MYSQL_exercises/create_tables.js create mode 100644 Week2/MYSQL_exercises/insert_data.js create mode 100644 Week2/MYSQL_exercises/queries.js rename Week2/{ => QA_PREP_EX}/QA_PREP_EXERCISE.md (100%) rename Week2/{ => QA_PREP_EX}/database_diagram_week2.drawio (99%) rename Week2/{ => QA_PREP_EX}/recipe_DataBase_week2.js (86%) delete mode 100644 Week2/exercise.js diff --git a/Week2/MYSQL_exercises/create_tables.js b/Week2/MYSQL_exercises/create_tables.js new file mode 100644 index 000000000..b79c5a0ea --- /dev/null +++ b/Week2/MYSQL_exercises/create_tables.js @@ -0,0 +1,52 @@ +const mysql = require('mysql'); + +const connection = mysql.createConnection({ + host: 'localhost', + user: 'hyfuser', + password: 'hyfpassword', + database: 'author_db', +}); + +connection.connect((err) => { + if (err) throw err; + console.log('Connected to mysql server.'); + createTables(); +}); + +function createTables() { + const AUTHORS = ` + CREATE TABLE IF NOT EXISTS authors ( + author_id INT AUTO_INCREMENT PRIMARY KEY, + author_name VARCHAR(255), + university VARCHAR(255), + date_of_birth DATE, + h_index INT, + gender ENUM('male', 'female', 'other'), + mentor INT, + FOREIGN KEY (mentor) REFERENCES authors(author_id) ON DELETE SET NULL + ); + `; + connection.query(AUTHORS, (error, results, fields) => { + if (error) throw error; + console.log('Table "authors" created or already exists.'); + createResearchPapersTable(); + }); +} + +function createResearchPapersTable() { + const RESEARCH_PAPERS = ` + CREATE TABLE IF NOT EXISTS research_papers ( + paper_id INT AUTO_INCREMENT PRIMARY KEY, + paper_title VARCHAR(255), + conference VARCHAR(255), + publish_date DATE, + author_id INT, + FOREIGN KEY (author_id) REFERENCES authors(author_id) ON DELETE CASCADE + ); + `; + connection.query(RESEARCH_PAPERS, (error, results, fields) => { + if (error) throw error; + console.log('Table "research_papers" created or already exists.'); + connection.end(); + }); +} diff --git a/Week2/MYSQL_exercises/insert_data.js b/Week2/MYSQL_exercises/insert_data.js new file mode 100644 index 000000000..0720d83de --- /dev/null +++ b/Week2/MYSQL_exercises/insert_data.js @@ -0,0 +1,64 @@ +const mysql = require('mysql'); + +const connection = mysql.createConnection({ + host: 'localhost', + user: 'hyfuser', + password: 'hyfpassword', + database: 'author_db', +}); + +connection.connect((err) => { + if (err) throw err; + console.log('Connected to mysql server.'); + insertSampleData(); +}); + +function insertSampleData() { + let authors = []; + let papers = []; + + for (let i = 1; i <= 15; i++) { + const year = 1980 + i; + const author = [ + `Author ${i}`, + `University ${String.fromCharCode(64 + (i % 3) + 1)}`, + `${year}-01-01`, + Math.floor(Math.random() * 20) + 1, + i % 2 === 0 ? 'male' : 'female', + i > 1 ? 
Math.floor(Math.random() * (i - 1)) + 1 : null + ]; + authors.push(author); + } + + for (let i = 1; i <= 30; i++) { + const paper = [ + `Paper ${i}`, + `Conference ${String.fromCharCode(64 + (i % 3) + 1)}`, + `2022-01-${(i % 30) + 1}`, + Math.floor(Math.random() * 15) + 1 + ]; + papers.push(paper); + } + + const INSERT_AUTHORS = ` + INSERT INTO authors (author_name, university, date_of_birth, h_index, gender, mentor) + VALUES ?; + `; + connection.query(INSERT_AUTHORS, [authors], (error, results, fields) => { + if (error) throw error; + console.log('Sample data inserted into "authors" table.'); + insertResearchPapers(papers); + }); +} + +function insertResearchPapers(papers) { + const INSERT_RESEARCH_PAPERS = ` + INSERT INTO research_papers (paper_title, conference, publish_date, author_id) + VALUES ?; + `; + connection.query(INSERT_RESEARCH_PAPERS, [papers], (error, results, fields) => { + if (error) throw error; + console.log('Sample data inserted into "research_papers" table.'); + connection.end(); + }); +} diff --git a/Week2/MYSQL_exercises/queries.js b/Week2/MYSQL_exercises/queries.js new file mode 100644 index 000000000..29bddf4c5 --- /dev/null +++ b/Week2/MYSQL_exercises/queries.js @@ -0,0 +1,93 @@ +const mysql = require('mysql'); + +const connection = mysql.createConnection({ + host: 'localhost', + user: 'hyfuser', + password: 'hyfpassword', + database: 'author_db', +}); + +connection.connect((err) => { + if (err) throw err; + console.log('Connected to mysql server.'); + runQueries(); +}); + +function runQueries() { + const QUERY_AUTHORS_MENTORS = ` + SELECT a1.author_name AS author, a2.author_name AS mentor + FROM authors a1 + LEFT JOIN authors a2 ON a1.mentor = a2.author_id; + `; + connection.query(QUERY_AUTHORS_MENTORS, (error, results, fields) => { + if (error) throw error; + console.log('Authors and their corresponding mentors:', results); + }); + + const QUERY_AUTHORS_PAPERS = ` + SELECT authors.author_name, authors.university, research_papers.paper_title + FROM authors + LEFT JOIN research_papers ON authors.author_id = research_papers.author_id; + `; + connection.query(QUERY_AUTHORS_PAPERS, (error, results, fields) => { + if (error) throw error; + console.log('Authors and their published papers:', results); + }); + + const QUERY_PAPER_AUTHOR_COUNT = ` + SELECT paper_title, COUNT(author_id) AS author_count + FROM research_papers + GROUP BY paper_title; + `; + connection.query(QUERY_PAPER_AUTHOR_COUNT, (error, results, fields) => { + if (error) throw error; + console.log('Number of authors per paper:', results); + }); + + const QUERY_FEMALE_PAPER_SUM = ` + SELECT SUM(rp.paper_count) AS female_paper_sum + FROM ( + SELECT author_id, COUNT(*) AS paper_count + FROM research_papers + GROUP BY author_id + ) rp + JOIN authors ON rp.author_id = authors.author_id + WHERE authors.gender = 'female'; + `; + connection.query(QUERY_FEMALE_PAPER_SUM, (error, results, fields) => { + if (error) throw error; + console.log('Sum of research papers by female authors:', results); + }); + + const QUERY_AVG_H_INDEX_PER_UNIVERSITY = ` + SELECT university, AVG(h_index) AS avg_h_index + FROM authors + GROUP BY university; + `; + connection.query(QUERY_AVG_H_INDEX_PER_UNIVERSITY, (error, results, fields) => { + if (error) throw error; + console.log('Average h-index per university:', results); + }); + + const QUERY_SUM_PAPERS_PER_UNIVERSITY = ` + SELECT university, COUNT(research_papers.paper_id) AS paper_count + FROM authors + LEFT JOIN research_papers ON authors.author_id = research_papers.author_id + 
GROUP BY university; + `; + connection.query(QUERY_SUM_PAPERS_PER_UNIVERSITY, (error, results, fields) => { + if (error) throw error; + console.log('Sum of research papers per university:', results); + }); + + const QUERY_MIN_MAX_H_INDEX_PER_UNIVERSITY = ` + SELECT university, MIN(h_index) AS min_h_index, MAX(h_index) AS max_h_index + FROM authors + GROUP BY university; + `; + connection.query(QUERY_MIN_MAX_H_INDEX_PER_UNIVERSITY, (error, results, fields) => { + if (error) throw error; + console.log('Min and max h-index per university:', results); + connection.end(); + }); +} diff --git a/Week2/QA_PREP_EXERCISE.md b/Week2/QA_PREP_EX/QA_PREP_EXERCISE.md similarity index 100% rename from Week2/QA_PREP_EXERCISE.md rename to Week2/QA_PREP_EX/QA_PREP_EXERCISE.md diff --git a/Week2/database_diagram_week2.drawio b/Week2/QA_PREP_EX/database_diagram_week2.drawio similarity index 99% rename from Week2/database_diagram_week2.drawio rename to Week2/QA_PREP_EX/database_diagram_week2.drawio index dec9a7876..a0ac6463b 100644 --- a/Week2/database_diagram_week2.drawio +++ b/Week2/QA_PREP_EX/database_diagram_week2.drawio @@ -1,6 +1,6 @@ - + @@ -185,7 +185,7 @@ - + diff --git a/Week2/recipe_DataBase_week2.js b/Week2/QA_PREP_EX/recipe_DataBase_week2.js similarity index 86% rename from Week2/recipe_DataBase_week2.js rename to Week2/QA_PREP_EX/recipe_DataBase_week2.js index 91c75266f..57924ce8a 100644 --- a/Week2/recipe_DataBase_week2.js +++ b/Week2/QA_PREP_EX/recipe_DataBase_week2.js @@ -1,4 +1,4 @@ -const {createDatabaseAndTables,connection} = require("../Week1/recipe_DataBase.js"); +const {createDatabaseAndTables,connection} = require("../../Week1/recipe_DataBase.js"); if (!connection._connectCalled) { connection.connect((err) => { @@ -12,65 +12,36 @@ if (!connection._connectCalled) { function runQueries() { createDatabaseAndTables(() => { - const ADD_TYPE_COLUMN_QUERY = ` ALTER TABLE Recipe - ADD COLUMN type ENUM - ( - 'vegetarian', - 'vegan', - 'non-vegetarian' - ) NOT NULL AFTER recipe_name;`; + ADD COLUMN type ENUM ('vegetarian','vegan','non-vegetarian') NOT NULL AFTER recipe_name;`; connection.query(ADD_TYPE_COLUMN_QUERY, function (error, results, fields) { if (error) throw error; console.log("Type column added to Recipe table."); - - // add categories to the table => Category const ADD_CATEGORIES_QUERY = ` INSERT INTO Category (category_name) VALUES - ('Cake'), - ('No-Bake'), - ('Vegetarian'), - ('Vegan'), - ('Gluten-Free'), - ('Japanese');`; + ('Cake'),('No-Bake'),('Vegetarian'),('Vegan'),('Gluten-Free'),('Japanese');`; connection.query(ADD_CATEGORIES_QUERY, function (error, results, fields) { if (error) throw error; console.log("Categories added to Category table."); - - //add materials to the table => Ingredient const ADD_INGREDIENTS_QUERY = ` INSERT INTO Ingredient (ingredient_name) VALUES - ('Condensed milk'), - ('Cream Cheese'), - ('Lemon Juice'), - ('Pie Crust'), - ('Cherry Jam'), - ('Brussels Sprouts'), - ('Butter'), - ('Flour'), - ('Salt'), - ('Pepper'), - ('Milk'), - ('Shredded Cheddar cheese'), - ('Macaroni'), - ('Eggs'), - ('Soy sauce'), - ('Sugar'), - ('Olive Oil');`; + ('Condensed milk'),('Cream Cheese'),('Lemon Juice'), + ('Pie Crust'),('Cherry Jam'),('Brussels Sprouts'), + ('Butter'),('Flour'),('Salt'),('Pepper'),('Milk'), + ('Shredded Cheddar cheese'),('Macaroni'),('Eggs'), + ('Soy sauce'),('Sugar'),('Olive Oil');`; connection.query(ADD_INGREDIENTS_QUERY, function (error, results, fields) { if (error) throw error; console.log("Ingredients added to Ingredient table."); - - // sort of 
the materials and categories and add commands to the table => recipe const ADD_RECIPES_QUERY = ` INSERT INTO Recipe (recipe_name, type) VALUES diff --git a/Week2/exercise.js b/Week2/exercise.js deleted file mode 100644 index 44cbf6a0f..000000000 --- a/Week2/exercise.js +++ /dev/null @@ -1,197 +0,0 @@ -const mysql = require('mysql'); - -const connection = mysql.createConnection({ - host: 'localhost', - user: 'hyfuser', - password: 'hyfpassword', - database: 'author_db', -}); - -connection.connect((err) => { - if (err) throw err; - console.log('Connected to mysql server.'); - connection.query('CREATE DATABASE IF NOT EXISTS author_db', (error, results) => { - if (error) throw error; - console.log('Database "author_db" created or already exists.'); - - connection.changeUser({database: 'author_db'}, (err) => { - if (err) throw err; - console.log('Using "author_db" database.'); - createTables(); - }); - }); -}); - -function createTables() { - const AUTHORS = ` - CREATE TABLE IF NOT EXISTS authors ( - author_id INT AUTO_INCREMENT PRIMARY KEY, - author_name VARCHAR(255), - university VARCHAR(255), - date_of_birth DATE, - h_index INT, - gender ENUM('male', 'female', 'other'), - mentor INT, - FOREIGN KEY (mentor) REFERENCES authors(author_id) ON DELETE SET NULL - ); - `; - connection.query(AUTHORS, (error, results, fields) => { - if (error) throw error; - console.log('Table "authors" created or already exists.'); - createResearchPapersTable(); - }); -} - -function createResearchPapersTable() { - const RESEARCH_PAPERS = ` - CREATE TABLE IF NOT EXISTS research_papers ( - paper_id INT AUTO_INCREMENT PRIMARY KEY, - paper_title VARCHAR(255), - conference VARCHAR(255), - publish_date DATE, - author_id INT, - FOREIGN KEY (author_id) REFERENCES authors(author_id) ON DELETE CASCADE - ); - `; - connection.query(RESEARCH_PAPERS, (error, results, fields) => { - if (error) throw error; - console.log('Table "research_papers" created or already exists.'); - insertSampleData(); - }); -} - -function insertSampleData() { - let authors = []; - let papers = []; - - for (let i = 1; i <= 15; i++) { - const year = 1980 + i; - const author = [ - `Author ${i}`, - `University ${String.fromCharCode(64 + (i % 3) + 1)}`, - `${year}-01-01`, - Math.floor(Math.random() * 20) + 1, - i % 2 === 0 ? 'male' : 'female', - i > 1 ? 
Math.floor(Math.random() * (i - 1)) + 1 : null - ]; - authors.push(author); - } - - for (let i = 1; i <= 30; i++) { - const paper = [ - `Paper ${i}`, - `Conference ${String.fromCharCode(64 + (i % 3) + 1)}`, - `2022-01-${(i % 30) + 1}`, - Math.floor(Math.random() * 15) + 1 - ]; - papers.push(paper); - } - - const INSERT_AUTHORS = ` - INSERT INTO authors (author_name, university, date_of_birth, h_index, gender, mentor) - VALUES ?; - `; - connection.query(INSERT_AUTHORS, [authors], (error, results, fields) => { - if (error) throw error; - console.log('Sample data inserted into "authors" table.'); - insertResearchPapers(papers); - }); -} - -function insertResearchPapers(papers) { - const INSERT_RESEARCH_PAPERS = ` - INSERT INTO research_papers (paper_title, conference, publish_date, author_id) - VALUES ?; - `; - connection.query(INSERT_RESEARCH_PAPERS, [papers], (error, results, fields) => { - if (error) throw error; - console.log('Sample data inserted into "research_papers" table.'); - runQueries(); - }); -} - -function runQueries() { - const QUERY_AUTHORS_MENTORS = ` - SELECT a1.author_name AS author, a2.author_name AS mentor - FROM authors a1 - LEFT JOIN authors a2 ON a1.mentor = a2.author_id; - `; - - connection.query(QUERY_AUTHORS_MENTORS, (error, results, fields) => { - if (error) throw error; - console.log('Authors and their corresponding mentors:', results); - }); - - const QUERY_AUTHORS_PAPERS = ` - SELECT authors.author_name, authors.university, research_papers.paper_title - FROM authors - LEFT JOIN research_papers ON authors.author_id = research_papers.author_id; - `; - - connection.query(QUERY_AUTHORS_PAPERS, (error, results, fields) => { - if (error) throw error; - console.log('Authors and their published papers:', results); - }); - - const QUERY_PAPER_AUTHOR_COUNT = ` - SELECT paper_title, COUNT(author_id) AS author_count - FROM research_papers - GROUP BY paper_title; - `; - - connection.query(QUERY_PAPER_AUTHOR_COUNT, (error, results, fields) => { - if (error) throw error; - console.log('Number of authors per paper:', results); - }); - - const QUERY_FEMALE_PAPER_SUM = ` - SELECT SUM(rp.paper_count) AS female_paper_sum - FROM ( - SELECT author_id, COUNT(*) AS paper_count - FROM research_papers - GROUP BY author_id - ) rp - JOIN authors ON rp.author_id = authors.author_id - WHERE authors.gender = 'female'; - `; - - connection.query(QUERY_FEMALE_PAPER_SUM, (error, results, fields) => { - if (error) throw error; - console.log('Sum of research papers by female authors:', results); - }); - - const QUERY_AVG_H_INDEX_PER_UNIVERSITY = ` - SELECT university, AVG(h_index) AS avg_h_index - FROM authors - GROUP BY university; - `; - - connection.query(QUERY_AVG_H_INDEX_PER_UNIVERSITY, (error, results, fields) => { - if (error) throw error; - console.log('Average h-index per university:', results); - }); - - const QUERY_SUM_PAPERS_PER_UNIVERSITY = ` - SELECT university, COUNT(research_papers.paper_id) AS paper_count - FROM authors - LEFT JOIN research_papers ON authors.author_id = research_papers.author_id - GROUP BY university; - `; - - connection.query(QUERY_SUM_PAPERS_PER_UNIVERSITY, (error, results, fields) => { - if (error) throw error; - console.log('Sum of research papers per university:', results); - }); - - const QUERY_MIN_MAX_H_INDEX_PER_UNIVERSITY = ` - SELECT university, MIN(h_index) AS min_h_index, MAX(h_index) AS max_h_index - FROM authors - GROUP BY university; - `; - - connection.query(QUERY_MIN_MAX_H_INDEX_PER_UNIVERSITY, (error, results, fields) => { - if (error) throw 
error; - console.log('Min and max h-index per university:', results); - connection.end(); - }); -} From f76263c53eb8ae9bae674e6fd6be99fd3c469786 Mon Sep 17 00:00:00 2001 From: mahtabmardani88 Date: Fri, 16 Aug 2024 19:52:40 +0200 Subject: [PATCH 4/5] mahtab mardani week3 database --- Week3/homework/mongodb/.env.example | 9 +- Week3/homework/mongodb/index.js | 73 ++++++----- Week3/homework/mongodb/seedDatabase.js | 5 + Week3/normalization.md | 140 +++++++++++++++++++++ package-lock.json | 161 +++++++++++++++++++++++++ package.json | 2 + 6 files changed, 357 insertions(+), 33 deletions(-) create mode 100644 Week3/normalization.md diff --git a/Week3/homework/mongodb/.env.example b/Week3/homework/mongodb/.env.example index f7bdfd393..c357ba78d 100644 --- a/Week3/homework/mongodb/.env.example +++ b/Week3/homework/mongodb/.env.example @@ -1,7 +1,8 @@ # This is an example .env file. We use .env files to store data that describes the environment the code needs to run on. # For this exercise we need a MONGODB_URL, so: -# - make a copy of this file, call it `.env` -# - look up how to connect to your database in atlas, there is a nice `connect` button that will help you out -# - fill in the link to your new database. Make sure the database is `databaseWeek3`! +# - make a copy of this file, call it .env +# - look up how to connect to your database in atlas, there is a nice connect button that will help you out +# - fill in the link to your new database. Make sure the database is databaseWeek3! + +MONGODB_URL=mongodb+srv://:@/databaseWeek3?retryWrites=true&w=majority -MONGODB_URL=mongodb+srv://:@/databaseWeek3?retryWrites=true&w=majority \ No newline at end of file diff --git a/Week3/homework/mongodb/index.js b/Week3/homework/mongodb/index.js index 41ee8b618..8635277a8 100644 --- a/Week3/homework/mongodb/index.js +++ b/Week3/homework/mongodb/index.js @@ -1,6 +1,13 @@ -const { MongoClient, ServerApiVersion } = require("mongodb"); +require('dotenv').config(); + +console.log("MONGODB_URL:", process.env.MONGODB_URL); +console.log("TEST_VAR:", process.env.TEST_VAR); +const { MongoClient, ServerApiVersion } = require("mongodb"); const { seedDatabase } = require("./seedDatabase.js"); +const uri = process.env.MONGODB_URL; +const client = new MongoClient(uri); + async function createEpisodeExercise(client) { /** @@ -12,7 +19,14 @@ async function createEpisodeExercise(client) { */ // Write code that will add this to the collection! 
- + const collection = client.db("databaseWeek3").collection("bob_ross_episodes"); + const newEpisode = { + episode: "S09E13", + title: "MOUNTAIN HIDE-AWAY", + elements: ["CIRRUS", "CLOUDS", "CONIFER", "DECIDIOUS", "GRASS", "MOUNTAIN", "MOUNTAINS", "RIVER", "SNOWY_MOUNTAIN", "TREE", "TREES"], + }; + const result = await collection.insertOne(newEpisode); + console.log( `Created season 9 episode 13 and the document got the id ${"TODO: fill in variable here"}` ); @@ -25,28 +39,25 @@ async function findEpisodesExercises(client) { */ // Find the title of episode 2 in season 2 [Should be: WINTER SUN] - + const collection = client.db("databaseWeek3").collection("bob_ross_episodes"); + const episode2 = await collection.findOne({ episode: "S02E02" }); console.log( - `The title of episode 2 in season 2 is ${"TODO: fill in variable here"}` + `The title of episode 2 in season 2 is ${episode2.title}` ); // Find the season and episode number of the episode called "BLACK RIVER" [Should be: S02E06] - - console.log( - `The season and episode number of the "BLACK RIVER" episode is ${"TODO: fill in variable here"}` - ); + const blackRiver = await collection.findOne({ title: "BLACK RIVER" }); + console.log(`The season and episode number of the "BLACK RIVER" episode is ${blackRiver.episode}`); // Find all of the episode titles where Bob Ross painted a CLIFF [Should be: NIGHT LIGHT, EVENING SEASCAPE, SURF'S UP, CLIFFSIDE, BY THE SEA, DEEP WILDERNESS HOME, CRIMSON TIDE, GRACEFUL WATERFALL] - - console.log( - `The episodes that Bob Ross painted a CLIFF are ${"TODO: fill in variable here"}` - ); + const cliffEpisodes = await collection.find({ elements: "CLIFF" }).toArray(); + const cliffTitles = cliffEpisodes.map((ep) => ep.title); + console.log(`The episodes that Bob Ross painted a CLIFF are ${cliffTitles.join(", ")}`); // Find all of the episode titles where Bob Ross painted a CLIFF and a LIGHTHOUSE [Should be: NIGHT LIGHT] - - console.log( - `The episodes that Bob Ross painted a CLIFF and a LIGHTHOUSE are ${"TODO: fill in variable here"}` - ); + const cliffAndLighthouseEpisodes = await collection.find({ elements: { $all: ["CLIFF", "LIGHTHOUSE"] } }).toArray(); + const cliffAndLighthouseTitles = cliffAndLighthouseEpisodes.map((ep) => ep.title); + console.log(`The episodes that Bob Ross painted a CLIFF and a LIGHTHOUSE are ${cliffAndLighthouseTitles.join(", ")}`); } async function updateEpisodeExercises(client) { @@ -56,20 +67,22 @@ async function updateEpisodeExercises(client) { * * Note: do NOT change the data.json file */ - + const collection = client.db("databaseWeek3").collection("bob_ross_episodes"); // Episode 13 in season 30 should be called BLUE RIDGE FALLS, yet it is called BLUE RIDGE FALLERS now. Fix that - - console.log( - `Ran a command to update episode 13 in season 30 and it updated ${"TODO: fill in variable here"} episodes` - ); + const updateTitleResult = await collection.updateOne( + { episode: "S30E13" }, + { $set: { title: "BLUE RIDGE FALLS" } } + ); + console.log(`Ran a command to update episode 13 in season 30 and it updated ${updateTitleResult.modifiedCount} episodes`); // Unfortunately we made a mistake in the arrays and the element type called 'BUSHES' should actually be 'BUSH' as sometimes only one bush was painted. // Update all of the documents in the collection that have `BUSHES` in the elements array to now have `BUSH` // It should update 120 episodes! 
- - console.log( - `Ran a command to update all the BUSHES to BUSH and it updated ${"TODO: fill in variable here"} episodes` + const updateBushesResult = await collection.updateMany( + { elements: "BUSHES" }, + { $set: { "elements.$": "BUSH" } } ); + console.log(`Ran a command to update all the BUSHES to BUSH and it updated ${updateBushesResult.modifiedCount} episodes`); } async function deleteEpisodeExercise(client) { @@ -77,14 +90,16 @@ async function deleteEpisodeExercise(client) { * It seems an errand episode has gotten into our data. * This is episode 14 in season 31. Please remove it and verify that it has been removed! */ - - console.log( - `Ran a command to delete episode and it deleted ${"TODO: fill in variable here"} episodes` - ); -} + const collection = client.db("databaseWeek3").collection("bob_ross_episodes"); + const deleteResult = await collection.deleteOne({ episode: "S31E14" }); + console.log(`Ran a command to delete episode and it deleted ${deleteResult.deletedCount} episodes`); + } async function main() { if (process.env.MONGODB_URL == null) { + console.log("MONGODB_URL:", process.env.MONGODB_URL); + console.log("TEST_VAR:", process.env.TEST_VAR); + throw Error( `You did not set up the environment variables correctly. Did you create a '.env' file and add a package to create it?` ); diff --git a/Week3/homework/mongodb/seedDatabase.js b/Week3/homework/mongodb/seedDatabase.js index 99be6b3d8..aef7dd027 100644 --- a/Week3/homework/mongodb/seedDatabase.js +++ b/Week3/homework/mongodb/seedDatabase.js @@ -7,6 +7,10 @@ const data = require("./data.json"); * @param {MongoClient} client - The client that is connected to your database */ const seedDatabase = async (client) => { + + const db = client.db("databaseWeek3"); + + const hasCollection = await client .db("databaseWeek3") .listCollections({ name: "bob_ross_episodes" }) @@ -42,6 +46,7 @@ const seedDatabase = async (client) => { // Add our documents await bobRossCollection.insertMany(documents); } else { + await db.createCollection("bob_ross_episodes"); throw Error("The collection `bob_ross_episodes` does not exist!"); } }; diff --git a/Week3/normalization.md b/Week3/normalization.md new file mode 100644 index 000000000..d602a5b4b --- /dev/null +++ b/Week3/normalization.md @@ -0,0 +1,140 @@ +# SQL Normalization Exercise + +## Original Table + +| member_id | member_name | member_address | dinner_id | dinner_date | venue_code | venue_description | food_code | food_description | +|-----------|---------------|----------------|-----------|-------------|------------|-------------------|-----------|------------------| +| 1 | Amit | 325 Max park | D00001001 | 2020-03-15 | B01 | Grand Ball Room | C1, C2 | Curry, Cake | +| 2 | Ben | 24 Hudson lane | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | S1, C2 | Soup, Cake | +| 3 | Cristina | 516 6th Ave | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | S1, C2 | Soup, Cake | +| 4 | Dan | 89 John St | D00001003 | 20-03-2020 | B03 | Goat Farm | P1, T1, M1| Pie, Tea, Mousse | +| 1 | Amit | 325 Max park | D00001003 | 20-03-2020 | B03 | Goat Farm | P1, T1, M1| Pie, Tea, Mousse | +| 3 | Cristina | 516 6th Ave | D00001004 | Mar 25 '20 | B04 | Mama's Kitchen | F1, M1 | Falafel, Mousse | +| 5 | Gabor | 54 Vivaldi St | D00001005 | Mar 26 '20 | B05 | Hungry Hungary | G1, P2 | Goulash, Pasca | +| 6 | Hema | 9 Peter St | D00001003 | 01-04-2020 | B03 | Goat Farm | P1, T1, M1| Pie, Tea, Mousse | + +## Normalization to 1NF + +The original table had columns (food_code, food_description) that contained multiple 
values. This violated the 1NF requirement that each column must contain atomic (single) values. + +To normalize the table, we split the multiple values in these columns into separate rows. + +### Resulting Table (1NF) + +| member_id | member_name | member_address | dinner_id | dinner_date | venue_code | venue_description | food_code | food_description | +|-----------|-------------|----------------|-----------|-------------|------------|-------------------|-----------|------------------| +| 1 | Amit | 325 Max park | D00001001 | 2020-03-15 | B01 | Grand Ball Room | C1 | Curry | +| 1 | Amit | 325 Max park | D00001001 | 2020-03-15 | B01 | Grand Ball Room | C2 | Cake | +| 2 | Ben | 24 Hudson lane | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | S1 | Soup | +| 2 | Ben | 24 Hudson lane | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | C2 | Cake | +| 3 | Cristina | 516 6th Ave | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | S1 | Soup | +| 3 | Cristina | 516 6th Ave | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | C2 | Cake | +| 4 | Dan | 89 John St | D00001003 | 20-03-2020 | B03 | Goat Farm | P1 | Pie | +| 4 | Dan | 89 John St | D00001003 | 20-03-2020 | B03 | Goat Farm | T1 | Tea | +| 4 | Dan | 89 John St | D00001003 | 20-03-2020 | B03 | Goat Farm | M1 | Mousse | +| 1 | Amit | 325 Max park | D00001003 | 20-03-2020 | B03 | Goat Farm | P1 | Pie | +| 1 | Amit | 325 Max park | D00001003 | 20-03-2020 | B03 | Goat Farm | T1 | Tea | +| 1 | Amit | 325 Max park | D00001003 | 20-03-2020 | B03 | Goat Farm | M1 | Mousse | +| 3 | Cristina | 516 6th Ave | D00001004 | Mar 25 '20 | B04 | Mama's Kitchen | F1 | Falafel | +| 3 | Cristina | 516 6th Ave | D00001004 | Mar 25 '20 | B04 | Mama's Kitchen | M1 | Mousse | +| 5 | Gabor | 54 Vivaldi St | D00001005 | Mar 26 '20 | B05 | Hungry Hungary | G1 | Goulash | +| 5 | Gabor | 54 Vivaldi St | D00001005 | Mar 26 '20 | B05 | Hungry Hungary | P2 | Pasca | +| 6 | Hema | 9 Peter St | D00001003 | 01-04-2020 | B03 | Goat Farm | P1 | Pie | +| 6 | Hema | 9 Peter St | D00001003 | 01-04-2020 | B03 | Goat Farm | T1 | Tea | +| 6 | Hema | 9 Peter St | D00001003 | 01-04-2020 | B03 | Goat Farm | M1 | Mousse | + +## Normalization to 2NF + +In the 1NF table, there were partial dependencies; for instance, the venue information only depended on the dinner_id and not on the member_id. To resolve this, we split the table into two tables: one for dinner information and another for member attendance. 
+ +### Dinner Table (2NF) + +| dinner_id | dinner_date | venue_code | venue_description | +|------------|-------------|------------|-------------------| +| D00001001 | 2020-03-15 | B01 | Grand Ball Room | +| D00001002 | 2020/03/15 | B02 | Zoku Roof Top | +| D00001003 | 20-03-2020 | B03 | Goat Farm | +| D00001004 | Mar 25 '20 | B04 | Mama's Kitchen | +| D00001005 | Mar 26 '20 | B05 | Hungry Hungary | + +### Member Attendance Table (2NF) + +| member_id | dinner_id | food_code | food_description | +|-----------|-----------|-----------|------------------| +| 1 | D00001001 | C1 | Curry | +| 1 | D00001001 | C2 | Cake | +| 2 | D00001002 | S1 | Soup | +| 2 | D00001002 | C2 | Cake | +| 3 | D00001002 | S1 | Soup | +| 3 | D00001002 | C2 | Cake | +| 4 | D00001003 | P1 | Pie | +| 4 | D00001003 | T1 | Tea | +| 4 | D00001003 | M1 | Mousse | +| 1 | D00001003 | P1 | Pie | +| 1 | D00001003 | T1 | Tea | +| 1 | D00001003 | M1 | Mousse | +| 3 | D00001004 | F1 | Falafel | +| 3 | D00001004 | M1 | Mousse | +| 5 | D00001005 | G1 | Goulash | +| 5 | D00001005 | P2 | Pasca | +| 6 | D00001003 | P1 | Pie | +| 6 | D00001003 | T1 | Tea | +| 6 | D00001003 | M1 | Mousse | + +## Normalization to 3NF + +The 2NF table still had transitive dependencies, such as venue_description being dependent on venue_code. To achieve 3NF, we separate these into their own tables. + +### Venue Table (3NF) + +| venue_code | venue_description | +|------------|-------------------| +| B01 | Grand Ball Room | +| B02 | Zoku Roof Top | +| B03 | Goat Farm | +| B04 | Mama's Kitchen | +| B05 | Hungry Hungary | + +### Food Table (3NF) + +| food_code | food_description | +|-----------|------------------| +| C1 | Curry | +| C2 | Cake | +| S1 | Soup | +| P1 | Pie | +| T1 | Tea | +| M1 | Mousse | +| F1 | Falafel | +| G1 | Goulash | +| P2 | Pasca | + +### Member Attendance Table (Final 3NF Version) + +| member_id | dinner_id | +|-----------|-----------| +| 1 | D00001001 | +| 2 | D00001002 | +| 3 | D00001002 | +| 4 | D00001003 | +| 1 | D00001003 | +| 3 | D00001004 | +| 5 | D00001005 | +| 6 | D00001003 | + +## Discussion + +### 1. Was your database already in 2NF / 3NF? +No, the original database was not in 2NF or 3NF. It contained columns with multiple values, which violates 1NF. Also, there were partial and transitive dependencies which violated 2NF and 3NF. + +### 2. What changes did you have to do to normalize your database? +- Split columns with multiple values into individual rows (1NF). +- Separated data into multiple tables to remove partial dependencies (2NF). +- Further split tables to remove transitive dependencies (3NF). + +### 3. What challenges do you foresee if you want to add thousands of recipes to your database? +- **Data Consistency:** Maintaining data consistency across multiple related tables could become complex. +- **Query Performance:** As the number of records grows, querying large datasets might become slower without proper indexing. +- **Storage Efficiency:** Storing large amounts of normalized data across multiple tables might require more storage space. +- **Maintenance:** Updating or deleting data might require changes in multiple tables, increasing maintenance complexity. 
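+
+## Example: Creating the 3NF Tables
+
+A minimal sketch of how the 3NF tables above could be created with the same `mysql` package used in the Week1 and Week2 scripts. The `dinner_db` database name, the column types, and the extra `member` and `dinner_food` tables (added so that member details and the foods served at each dinner are not lost) are assumptions for illustration only, not part of the exercise tables above.
+
+```js
+const util = require("util");
+const mysql = require("mysql");
+
+const connection = mysql.createConnection({
+  host: "localhost",
+  user: "hyfuser",
+  password: "hyfpassword",
+});
+const execute = util.promisify(connection.query.bind(connection));
+
+async function createDinnerTables() {
+  connection.connect();
+  try {
+    await execute("CREATE DATABASE IF NOT EXISTS dinner_db"); // assumed database name
+    await execute("USE dinner_db");
+
+    // One table per entity, then two link tables for the many-to-many relationships.
+    const statements = [
+      `CREATE TABLE IF NOT EXISTS member (
+         member_id INT PRIMARY KEY,
+         member_name VARCHAR(100),
+         member_address VARCHAR(255)
+       )`,
+      `CREATE TABLE IF NOT EXISTS venue (
+         venue_code VARCHAR(10) PRIMARY KEY,
+         venue_description VARCHAR(255)
+       )`,
+      `CREATE TABLE IF NOT EXISTS food (
+         food_code VARCHAR(10) PRIMARY KEY,
+         food_description VARCHAR(255)
+       )`,
+      `CREATE TABLE IF NOT EXISTS dinner (
+         dinner_id VARCHAR(20) PRIMARY KEY,
+         dinner_date DATE,
+         venue_code VARCHAR(10),
+         FOREIGN KEY (venue_code) REFERENCES venue(venue_code)
+       )`,
+      `CREATE TABLE IF NOT EXISTS member_attendance (
+         member_id INT,
+         dinner_id VARCHAR(20),
+         PRIMARY KEY (member_id, dinner_id),
+         FOREIGN KEY (member_id) REFERENCES member(member_id),
+         FOREIGN KEY (dinner_id) REFERENCES dinner(dinner_id)
+       )`,
+      `CREATE TABLE IF NOT EXISTS dinner_food (
+         dinner_id VARCHAR(20),
+         food_code VARCHAR(10),
+         PRIMARY KEY (dinner_id, food_code),
+         FOREIGN KEY (dinner_id) REFERENCES dinner(dinner_id),
+         FOREIGN KEY (food_code) REFERENCES food(food_code)
+       )`,
+    ];
+    for (const statement of statements) {
+      await execute(statement);
+    }
+  } catch (error) {
+    console.error(error);
+  } finally {
+    connection.end();
+  }
+}
+
+createDinnerTables();
+```
+
+With this layout, dates such as `2020/03/15` and `Mar 25 '20` would also have to be converted to a single `DATE` format on insert, which removes the inconsistent date formats visible in the original table.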
+ diff --git a/package-lock.json b/package-lock.json index cc9bb8ea4..9ba4da3e6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,9 +5,35 @@ "packages": { "": { "dependencies": { + "dotenv": "^16.4.5", + "mongodb": "^6.8.0", "mysql": "^2.18.1" } }, + "node_modules/@mongodb-js/saslprep": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.8.tgz", + "integrity": "sha512-qKwC/M/nNNaKUBMQ0nuzm47b7ZYWQHN3pcXq4IIcoSBc2hOIrflAxJduIvvqmhoz3gR2TacTAs8vlsCVPkiEdQ==", + "license": "MIT", + "dependencies": { + "sparse-bitfield": "^3.0.3" + } + }, + "node_modules/@types/webidl-conversions": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz", + "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==", + "license": "MIT" + }, + "node_modules/@types/whatwg-url": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz", + "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==", + "license": "MIT", + "dependencies": { + "@types/webidl-conversions": "*" + } + }, "node_modules/bignumber.js": { "version": "9.0.0", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", @@ -17,12 +43,33 @@ "node": "*" } }, + "node_modules/bson": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/bson/-/bson-6.8.0.tgz", + "integrity": "sha512-iOJg8pr7wq2tg/zSlCCHMi3hMm5JTOxLTagf3zxhcenHsFp+c6uOs6K7W5UE7A4QIJGtqh/ZovFNMP4mOPJynQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=16.20.1" + } + }, "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", "license": "MIT" }, + "node_modules/dotenv": { + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -35,6 +82,68 @@ "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", "license": "MIT" }, + "node_modules/memory-pager": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", + "license": "MIT" + }, + "node_modules/mongodb": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.8.0.tgz", + "integrity": "sha512-HGQ9NWDle5WvwMnrvUxsFYPd3JEbqD3RgABHBQRuoCEND0qzhsd0iH5ypHsf1eJ+sXmvmyKpP+FLOKY8Il7jMw==", + "license": "Apache-2.0", + "dependencies": { + "@mongodb-js/saslprep": "^1.1.5", + "bson": "^6.7.0", + "mongodb-connection-string-url": "^3.0.0" + }, + "engines": { + "node": ">=16.20.1" + }, + "peerDependencies": { + "@aws-sdk/credential-providers": "^3.188.0", + "@mongodb-js/zstd": "^1.1.0", + "gcp-metadata": "^5.2.0", + "kerberos": "^2.0.1", + "mongodb-client-encryption": ">=6.0.0 <7", + "snappy": "^7.2.2", + "socks": 
"^2.7.1" + }, + "peerDependenciesMeta": { + "@aws-sdk/credential-providers": { + "optional": true + }, + "@mongodb-js/zstd": { + "optional": true + }, + "gcp-metadata": { + "optional": true + }, + "kerberos": { + "optional": true + }, + "mongodb-client-encryption": { + "optional": true + }, + "snappy": { + "optional": true + }, + "socks": { + "optional": true + } + } + }, + "node_modules/mongodb-connection-string-url": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.1.tgz", + "integrity": "sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==", + "license": "Apache-2.0", + "dependencies": { + "@types/whatwg-url": "^11.0.2", + "whatwg-url": "^13.0.0" + } + }, "node_modules/mysql": { "version": "2.18.1", "resolved": "https://registry.npmjs.org/mysql/-/mysql-2.18.1.tgz", @@ -56,6 +165,15 @@ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "license": "MIT" }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/readable-stream": { "version": "2.3.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", @@ -77,6 +195,15 @@ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", "license": "MIT" }, + "node_modules/sparse-bitfield": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", + "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", + "license": "MIT", + "dependencies": { + "memory-pager": "^1.0.2" + } + }, "node_modules/sqlstring": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.1.tgz", @@ -95,11 +222,45 @@ "safe-buffer": "~5.1.0" } }, + "node_modules/tr46": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-4.1.1.tgz", + "integrity": "sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==", + "license": "MIT", + "dependencies": { + "punycode": "^2.3.0" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "license": "MIT" + }, + "node_modules/webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/whatwg-url": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-13.0.0.tgz", + "integrity": "sha512-9WWbymnqj57+XEuqADHrCJ2eSXzn8WXIW/YSGaZtb2WKAInQ6CHfaUUcTyyver0p8BDg5StLQq8h1vtZuwmOig==", + "license": "MIT", + "dependencies": { + "tr46": "^4.1.1", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=16" + } } } } diff --git a/package.json b/package.json index 
49ed5f993..bd7156aea 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,7 @@ { "dependencies": { + "dotenv": "^16.4.5", + "mongodb": "^6.8.0", "mysql": "^2.18.1" } } From 94ee41bf458b22037bd2f85c41b086e29d1f1da5 Mon Sep 17 00:00:00 2001 From: mahtabmardani88 Date: Mon, 19 Aug 2024 14:16:28 +0200 Subject: [PATCH 5/5] update md file --- Week3/normalization.md | 148 +++++------------------------------------ 1 file changed, 17 insertions(+), 131 deletions(-) diff --git a/Week3/normalization.md b/Week3/normalization.md index d602a5b4b..3632f9fef 100644 --- a/Week3/normalization.md +++ b/Week3/normalization.md @@ -1,140 +1,26 @@ # SQL Normalization Exercise -## Original Table +## Questions and Answers -| member_id | member_name | member_address | dinner_id | dinner_date | venue_code | venue_description | food_code | food_description | -|-----------|---------------|----------------|-----------|-------------|------------|-------------------|-----------|------------------| -| 1 | Amit | 325 Max park | D00001001 | 2020-03-15 | B01 | Grand Ball Room | C1, C2 | Curry, Cake | -| 2 | Ben | 24 Hudson lane | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | S1, C2 | Soup, Cake | -| 3 | Cristina | 516 6th Ave | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | S1, C2 | Soup, Cake | -| 4 | Dan | 89 John St | D00001003 | 20-03-2020 | B03 | Goat Farm | P1, T1, M1| Pie, Tea, Mousse | -| 1 | Amit | 325 Max park | D00001003 | 20-03-2020 | B03 | Goat Farm | P1, T1, M1| Pie, Tea, Mousse | -| 3 | Cristina | 516 6th Ave | D00001004 | Mar 25 '20 | B04 | Mama's Kitchen | F1, M1 | Falafel, Mousse | -| 5 | Gabor | 54 Vivaldi St | D00001005 | Mar 26 '20 | B05 | Hungry Hungary | G1, P2 | Goulash, Pasca | -| 6 | Hema | 9 Peter St | D00001003 | 01-04-2020 | B03 | Goat Farm | P1, T1, M1| Pie, Tea, Mousse | +1. **What columns violate 1NF?** + - The columns `food_code` and `food_description` violate 1NF because they contain multiple values in a single cell. -## Normalization to 1NF +2. **What entities do you recognize that could be extracted?** + - We can extract entities like `Members`, `Dinners`, `Venues`, and `Food`. -The original table had columns (food_code, food_description) that contained multiple values. This violated the 1NF requirement that each column must contain atomic (single) values. +3. **Name all the tables and columns that would make a 3NF compliant solution.** + - **Members Table:** `member_id`, `member_name`, `member_address` + - **Dinners Table:** `dinner_id`, `dinner_date`, `venue_code` + - **Venues Table:** `venue_code`, `venue_description` + - **Food Table:** `food_code`, `food_description` + - **Member_Dinners Table:** `member_id`, `dinner_id`, `food_code` -To normalize the table, we split the multiple values in these columns into separate rows. 
+--- -### Resulting Table (1NF) +# Prep Exercises -| member_id | member_name | member_address | dinner_id | dinner_date | venue_code | venue_description | food_code | food_description | -|-----------|-------------|----------------|-----------|-------------|------------|-------------------|-----------|------------------| -| 1 | Amit | 325 Max park | D00001001 | 2020-03-15 | B01 | Grand Ball Room | C1 | Curry | -| 1 | Amit | 325 Max park | D00001001 | 2020-03-15 | B01 | Grand Ball Room | C2 | Cake | -| 2 | Ben | 24 Hudson lane | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | S1 | Soup | -| 2 | Ben | 24 Hudson lane | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | C2 | Cake | -| 3 | Cristina | 516 6th Ave | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | S1 | Soup | -| 3 | Cristina | 516 6th Ave | D00001002 | 2020/03/15 | B02 | Zoku Roof Top | C2 | Cake | -| 4 | Dan | 89 John St | D00001003 | 20-03-2020 | B03 | Goat Farm | P1 | Pie | -| 4 | Dan | 89 John St | D00001003 | 20-03-2020 | B03 | Goat Farm | T1 | Tea | -| 4 | Dan | 89 John St | D00001003 | 20-03-2020 | B03 | Goat Farm | M1 | Mousse | -| 1 | Amit | 325 Max park | D00001003 | 20-03-2020 | B03 | Goat Farm | P1 | Pie | -| 1 | Amit | 325 Max park | D00001003 | 20-03-2020 | B03 | Goat Farm | T1 | Tea | -| 1 | Amit | 325 Max park | D00001003 | 20-03-2020 | B03 | Goat Farm | M1 | Mousse | -| 3 | Cristina | 516 6th Ave | D00001004 | Mar 25 '20 | B04 | Mama's Kitchen | F1 | Falafel | -| 3 | Cristina | 516 6th Ave | D00001004 | Mar 25 '20 | B04 | Mama's Kitchen | M1 | Mousse | -| 5 | Gabor | 54 Vivaldi St | D00001005 | Mar 26 '20 | B05 | Hungry Hungary | G1 | Goulash | -| 5 | Gabor | 54 Vivaldi St | D00001005 | Mar 26 '20 | B05 | Hungry Hungary | P2 | Pasca | -| 6 | Hema | 9 Peter St | D00001003 | 01-04-2020 | B03 | Goat Farm | P1 | Pie | -| 6 | Hema | 9 Peter St | D00001003 | 01-04-2020 | B03 | Goat Farm | T1 | Tea | -| 6 | Hema | 9 Peter St | D00001003 | 01-04-2020 | B03 | Goat Farm | M1 | Mousse | - -## Normalization to 2NF - -In the 1NF table, there were partial dependencies; for instance, the venue information only depended on the dinner_id and not on the member_id. To resolve this, we split the table into two tables: one for dinner information and another for member attendance. 
- -### Dinner Table (2NF) - -| dinner_id | dinner_date | venue_code | venue_description | -|------------|-------------|------------|-------------------| -| D00001001 | 2020-03-15 | B01 | Grand Ball Room | -| D00001002 | 2020/03/15 | B02 | Zoku Roof Top | -| D00001003 | 20-03-2020 | B03 | Goat Farm | -| D00001004 | Mar 25 '20 | B04 | Mama's Kitchen | -| D00001005 | Mar 26 '20 | B05 | Hungry Hungary | - -### Member Attendance Table (2NF) - -| member_id | dinner_id | food_code | food_description | -|-----------|-----------|-----------|------------------| -| 1 | D00001001 | C1 | Curry | -| 1 | D00001001 | C2 | Cake | -| 2 | D00001002 | S1 | Soup | -| 2 | D00001002 | C2 | Cake | -| 3 | D00001002 | S1 | Soup | -| 3 | D00001002 | C2 | Cake | -| 4 | D00001003 | P1 | Pie | -| 4 | D00001003 | T1 | Tea | -| 4 | D00001003 | M1 | Mousse | -| 1 | D00001003 | P1 | Pie | -| 1 | D00001003 | T1 | Tea | -| 1 | D00001003 | M1 | Mousse | -| 3 | D00001004 | F1 | Falafel | -| 3 | D00001004 | M1 | Mousse | -| 5 | D00001005 | G1 | Goulash | -| 5 | D00001005 | P2 | Pasca | -| 6 | D00001003 | P1 | Pie | -| 6 | D00001003 | T1 | Tea | -| 6 | D00001003 | M1 | Mousse | - -## Normalization to 3NF - -The 2NF table still had transitive dependencies, such as venue_description being dependent on venue_code. To achieve 3NF, we separate these into their own tables. - -### Venue Table (3NF) - -| venue_code | venue_description | -|------------|-------------------| -| B01 | Grand Ball Room | -| B02 | Zoku Roof Top | -| B03 | Goat Farm | -| B04 | Mama's Kitchen | -| B05 | Hungry Hungary | - -### Food Table (3NF) - -| food_code | food_description | -|-----------|------------------| -| C1 | Curry | -| C2 | Cake | -| S1 | Soup | -| P1 | Pie | -| T1 | Tea | -| M1 | Mousse | -| F1 | Falafel | -| G1 | Goulash | -| P2 | Pasca | - -### Member Attendance Table (Final 3NF Version) - -| member_id | dinner_id | -|-----------|-----------| -| 1 | D00001001 | -| 2 | D00001002 | -| 3 | D00001002 | -| 4 | D00001003 | -| 1 | D00001003 | -| 3 | D00001004 | -| 5 | D00001005 | -| 6 | D00001003 | - -## Discussion - -### 1. Was your database already in 2NF / 3NF? -No, the original database was not in 2NF or 3NF. It contained columns with multiple values, which violates 1NF. Also, there were partial and transitive dependencies which violated 2NF and 3NF. - -### 2. What changes did you have to do to normalize your database? -- Split columns with multiple values into individual rows (1NF). -- Separated data into multiple tables to remove partial dependencies (2NF). -- Further split tables to remove transitive dependencies (3NF). - -### 3. What challenges do you foresee if you want to add thousands of recipes to your database? -- **Data Consistency:** Maintaining data consistency across multiple related tables could become complex. -- **Query Performance:** As the number of records grows, querying large datasets might become slower without proper indexing. -- **Storage Efficiency:** Storing large amounts of normalized data across multiple tables might require more storage space. -- **Maintenance:** Updating or deleting data might require changes in multiple tables, increasing maintenance complexity. +1. **Was your database already in 2NF / 3NF?** + - No, the original database was not in 2NF or 3NF. +2. **What changes did you have to do to normalize your database?** + - We had to split the data into different tables and remove any partial and transitive dependencies.
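For reference, one possible MySQL rendering of the 3NF-compliant table list named in the answers above; the table and column names follow that list, while the types and the standardized `DATE` column are assumptions, not an existing schema in this repo.

```sql
CREATE TABLE Members (
  member_id      INT PRIMARY KEY,
  member_name    VARCHAR(50),
  member_address VARCHAR(100)
);

CREATE TABLE Venues (
  venue_code        VARCHAR(5) PRIMARY KEY,
  venue_description VARCHAR(50)
);

CREATE TABLE Dinners (
  dinner_id   VARCHAR(10) PRIMARY KEY,
  dinner_date DATE,
  venue_code  VARCHAR(5),
  FOREIGN KEY (venue_code) REFERENCES Venues (venue_code)
);

CREATE TABLE Food (
  food_code        VARCHAR(5) PRIMARY KEY,
  food_description VARCHAR(50)
);

CREATE TABLE Member_Dinners (
  member_id INT,
  dinner_id VARCHAR(10),
  food_code VARCHAR(5),
  PRIMARY KEY (member_id, dinner_id, food_code),
  FOREIGN KEY (member_id) REFERENCES Members (member_id),
  FOREIGN KEY (dinner_id) REFERENCES Dinners (dinner_id),
  FOREIGN KEY (food_code) REFERENCES Food (food_code)
);

-- Example query: who ate what, where, and when.
SELECT m.member_name, d.dinner_date, v.venue_description, f.food_description
FROM Member_Dinners md
JOIN Members m ON m.member_id = md.member_id
JOIN Dinners d ON d.dinner_id = md.dinner_id
JOIN Venues  v ON v.venue_code = d.venue_code
JOIN Food    f ON f.food_code  = md.food_code;
```

The four-way join at the end shows the trade-off of the split: reconstructing the original report now touches every table, which is the query-cost side of removing the partial and transitive dependencies.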