diff --git a/.env b/.env
new file mode 100644
index 0000000..deff20a
--- /dev/null
+++ b/.env
@@ -0,0 +1,3 @@
+PORT=3000
+DATABASE_URL=postgres://postgres:ppp@localhost:5432/todos
+PGSSLMODE=disable
diff --git a/.gitignore b/.gitignore
index 01a7ce3..6a5bcc8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,4 @@
 blog.md
 node_modules/
 npm-debug.log
+package-lock.json
diff --git a/Procfile b/Procfile
new file mode 100644
index 0000000..1a303fb
--- /dev/null
+++ b/Procfile
@@ -0,0 +1,2 @@
+web: npm start
+release: npm run db-migrate -- up
diff --git a/app.json b/app.json
new file mode 100644
index 0000000..8e01a31
--- /dev/null
+++ b/app.json
@@ -0,0 +1,19 @@
+{
+  "name": "node-postgres-todo",
+  "description": "",
+  "scripts": {},
+  "env": {},
+  "formation": {
+    "web": {
+      "quantity": 1
+    }
+  },
+  "addons": [
+    "heroku-postgresql"
+  ],
+  "buildpacks": [
+    {
+      "url": "heroku/nodejs"
+    }
+  ]
+}
diff --git a/bin/www b/bin/www
index 96716ac..56adb2c 100755
--- a/bin/www
+++ b/bin/www
@@ -4,6 +4,9 @@
  * Module dependencies.
  */
 
+// get environment from .env file, if present
+require('dotenv').config();
+
 var app = require('../app');
 var debug = require('debug')('node-postgres-todo:server');
 var http = require('http');
diff --git a/migrations/20180610171545-bootstrap-db.js b/migrations/20180610171545-bootstrap-db.js
new file mode 100644
index 0000000..79ebbf0
--- /dev/null
+++ b/migrations/20180610171545-bootstrap-db.js
@@ -0,0 +1,53 @@
+'use strict';
+
+var dbm;
+var type;
+var seed;
+var fs = require('fs');
+var path = require('path');
+var Promise;
+
+/**
+ * We receive the dbmigrate dependency from dbmigrate initially.
+ * This enables us to not have to rely on NODE_PATH.
+ */
+exports.setup = function(options, seedLink) {
+  dbm = options.dbmigrate;
+  type = dbm.dataType;
+  seed = seedLink;
+  Promise = options.Promise;
+};
+
+exports.up = function(db) {
+  var filePath = path.join(__dirname, 'sqls', '20180610171545-bootstrap-db-up.sql');
+  return new Promise( function( resolve, reject ) {
+    fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
+      if (err) return reject(err);
+      console.log('received data: ' + data);
+
+      resolve(data);
+    });
+  })
+  .then(function(data) {
+    return db.runSql(data);
+  });
+};
+
+exports.down = function(db) {
+  var filePath = path.join(__dirname, 'sqls', '20180610171545-bootstrap-db-down.sql');
+  return new Promise( function( resolve, reject ) {
+    fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
+      if (err) return reject(err);
+      console.log('received data: ' + data);
+
+      resolve(data);
+    });
+  })
+  .then(function(data) {
+    return db.runSql(data);
+  });
+};
+
+exports._meta = {
+  "version": 1
+};
diff --git a/migrations/sqls/20180610171545-bootstrap-db-up.sql b/migrations/sqls/20180610171545-bootstrap-db-up.sql
new file mode 100644
index 0000000..4ee69e3
--- /dev/null
+++ b/migrations/sqls/20180610171545-bootstrap-db-up.sql
@@ -0,0 +1,5 @@
+CREATE TABLE items(
+  id SERIAL PRIMARY KEY,
+  text VARCHAR NOT NULL,
+  complete BOOLEAN
+);
diff --git a/package.json b/package.json
index 5301d9e..cc6490d 100644
--- a/package.json
+++ b/package.json
@@ -3,16 +3,20 @@
   "version": "0.0.0",
   "private": true,
   "scripts": {
-    "start": "supervisor ./bin/www"
+    "start": "node ./bin/www",
+    "db-migrate": "node ./node_modules/.bin/db-migrate"
   },
   "dependencies": {
-    "body-parser": "~1.15.1",
-    "cookie-parser": "~1.4.3",
-    "debug": "~2.2.0",
-    "express": "~4.13.4",
-    "jade": "~1.11.0",
-    "morgan": "~1.7.0",
-    "pg": "^6.1.0",
-    "serve-favicon": "~2.3.0"
+    "body-parser": "^1.18.3",
+    "cookie-parser": "1.4.3",
+    "db-migrate": "^0.11.1",
+    "db-migrate-pg": "^0.4.0",
+    "debug": "^3.1.0",
+    "dotenv": "^6.0.0",
+    "express": "^4.16.3",
+    "jade": "1.11.0",
+    "morgan": "^1.9.0",
+    "pg": "^7.4.3",
+    "serve-favicon": "^2.5.0"
   }
 }
diff --git a/readme.md b/readme.md
index d68ecdc..f740654 100644
--- a/readme.md
+++ b/readme.md
@@ -7,9 +7,9 @@ This is a basic single page application built with Node, Express, Angular, and P
 ## Quick Start
 
 1. Clone the repo
-1. Install dependencies: `npm install`
-1. Start your Postgres server and create a database called "todo"
-1. Create the database tables: `node server/models/database.js`
+1. Install dependencies: `$ npm install`
+1. Start your Postgres server and create a database called "todos"
+1. Create the database tables: `$ npm run db-migrate -- up`
 1. Start the server: `$ npm start`
 
 ## Tests
@@ -28,3 +28,19 @@ Using this load test it is possible to verify several things:
 
 See the comments in the [script](https://github.com/mjhea0/node-postgres-todo/blob/master/test/load-test.sh) for more information.
 
+## Running on Heroku
+
+Files are provided to run this app on Heroku:
+- `Procfile` - declares the web process and the release task that migrates the associated database
+- `app.json` - configures the app for Heroku CI, so tests run automatically as needed
+
+## Using a Dockerized PostgreSQL server
+
+If you have Docker installed, you can run your PostgreSQL database in a container.
+The following command starts a database running the latest PostgreSQL version:
+
+`docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=ppp -d postgres`
+
+Your host port `5432` is mapped to the container's port `5432`, so it will appear as if you have a local PostgreSQL install.
+The password for the default "postgres" user will be "ppp".
+The `--rm` flag deletes the container when you shut it down; leave the flag off to keep it around.
diff --git a/server/models/database.js b/server/models/database.js
deleted file mode 100644
index c9d8c0a..0000000
--- a/server/models/database.js
+++ /dev/null
@@ -1,8 +0,0 @@
-const pg = require('pg');
-const connectionString = process.env.DATABASE_URL || 'postgres://localhost:5432/todo';
-
-const client = new pg.Client(connectionString);
-client.connect();
-const query = client.query(
-  'CREATE TABLE items(id SERIAL PRIMARY KEY, text VARCHAR(40) not null, complete BOOLEAN)');
-query.on('end', () => { client.end(); });
diff --git a/server/routes/index.js b/server/routes/index.js
index 40d4bb4..c264454 100644
--- a/server/routes/index.js
+++ b/server/routes/index.js
@@ -1,124 +1,38 @@
 const express = require('express');
 const router = express.Router();
-const pg = require('pg');
+const { Client } = require('pg');
+const connectionString = process.env.DATABASE_URL;
+const pg = new Client({ connectionString });
+pg.connect();
 const path = require('path');
-const connectionString = process.env.DATABASE_URL || 'postgres://localhost:5432/todo';
 
 router.get('/', (req, res, next) => {
   res.sendFile(path.join(
     __dirname, '..', '..', 'client', 'views', 'index.html'));
 });
 
-router.get('/api/v1/todos', (req, res, next) => {
-  const results = [];
-  // Get a Postgres client from the connection pool
-  pg.connect(connectionString, (err, client, done) => {
-    // Handle connection errors
-    if(err) {
-      done();
-      console.log(err);
-      return res.status(500).json({success: false, data: err});
-    }
-    // SQL Query > Select Data
-    const query = client.query('SELECT * FROM items ORDER BY id ASC;');
-    // Stream results back one row at a time
-    query.on('row', (row) => {
-      results.push(row);
-    });
-    // After all data is returned, close connection and return results
-    query.on('end', () => {
-      done();
-      return res.json(results);
-    });
-  });
-});
+router.get('/api/v1/todos', (req, res, next) => pg.query('SELECT * FROM items ORDER BY id ASC;')
+  .then(({ rows }) => res.json(rows))
+  .catch(e => console.error(e) || res.status(500).json({success: false, data: e})));
 
-router.post('/api/v1/todos', (req, res, next) => {
-  const results = [];
-  // Grab data from http request
-  const data = {text: req.body.text, complete: false};
-  // Get a Postgres client from the connection pool
-  pg.connect(connectionString, (err, client, done) => {
-    // Handle connection errors
-    if(err) {
-      done();
-      console.log(err);
-      return res.status(500).json({success: false, data: err});
-    }
-    // SQL Query > Insert Data
-    client.query('INSERT INTO items(text, complete) values($1, $2)',
-    [data.text, data.complete]);
-    // SQL Query > Select Data
-    const query = client.query('SELECT * FROM items ORDER BY id ASC');
-    // Stream results back one row at a time
-    query.on('row', (row) => {
-      results.push(row);
-    });
-    // After all data is returned, close connection and return results
-    query.on('end', () => {
-      done();
-      return res.json(results);
-    });
-  });
-});
+router.post('/api/v1/todos', (req, res, next) => pg.query('INSERT INTO items(text, complete) values($1, $2)', [req.body.text, false])
+  .then(() => pg.query('SELECT * FROM items ORDER BY id ASC'))
+  .then(({ rows }) => res.json(rows))
+  .catch(e => console.error(e) || res.status(500).json({success: false, data: e})));
 
 router.put('/api/v1/todos/:todo_id', (req, res, next) => {
-  const results = [];
-  // Grab data from the URL parameters
-  const id = req.params.todo_id;
-  // Grab data from http request
-  const data = {text: req.body.text, complete: req.body.complete};
-  // Get a Postgres client from the connection pool
-  pg.connect(connectionString, (err, client, done) => {
-    // Handle connection errors
-    if(err) {
-      done();
-      console.log(err);
-      return res.status(500).json({success: false, data: err});
-    }
-    // SQL Query > Update Data
-    client.query('UPDATE items SET text=($1), complete=($2) WHERE id=($3)',
-    [data.text, data.complete, id]);
-    // SQL Query > Select Data
-    const query = client.query("SELECT * FROM items ORDER BY id ASC");
-    // Stream results back one row at a time
-    query.on('row', (row) => {
-      results.push(row);
-    });
-    // After all data is returned, close connection and return results
-    query.on('end', function() {
-      done();
-      return res.json(results);
-    });
-  });
-});
+  const { text, complete } = req.body;
+  const { todo_id: id } = req.params;
 
-router.delete('/api/v1/todos/:todo_id', (req, res, next) => {
-  const results = [];
-  // Grab data from the URL parameters
-  const id = req.params.todo_id;
-  // Get a Postgres client from the connection pool
-  pg.connect(connectionString, (err, client, done) => {
-    // Handle connection errors
-    if(err) {
-      done();
-      console.log(err);
-      return res.status(500).json({success: false, data: err});
-    }
-    // SQL Query > Delete Data
-    client.query('DELETE FROM items WHERE id=($1)', [id]);
-    // SQL Query > Select Data
-    var query = client.query('SELECT * FROM items ORDER BY id ASC');
-    // Stream results back one row at a time
-    query.on('row', (row) => {
-      results.push(row);
-    });
-    // After all data is returned, close connection and return results
-    query.on('end', () => {
-      done();
-      return res.json(results);
-    });
+  return pg.query('UPDATE items SET text=($1), complete=($2) WHERE id=($3)', [text, complete, id])
+    .then(() => pg.query('SELECT * FROM items ORDER BY id ASC'))
+    .then(({ rows }) => res.json(rows))
+    .catch(e => console.log(e) || res.status(500).json({success: false, data: e}));
 });
-});
+
+router.delete('/api/v1/todos/:todo_id', (req, res, next) => pg.query('DELETE FROM items WHERE id=($1)', [req.params.todo_id])
+  .then(() => pg.query('SELECT * FROM items ORDER BY id ASC'))
+  .then(({ rows }) => res.json(rows))
+  .catch(e => console.log(e) || res.status(500).json({success: false, data: e})));
 
 module.exports = router;
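A note on the design choice in `server/routes/index.js`: the refactor swaps the per-request `pg.connect` callbacks for a single `Client` shared by every route, queried through the promise API of `pg` 7. For reference, a sketch of the same route shape using `pg.Pool` instead (not part of this diff) looks like this:

```js
// Alternative sketch using pg.Pool instead of one shared Client (not part
// of this diff). Pool.query has the same promise signature, but each query
// checks a client out of the pool, so a broken connection is replaced
// instead of leaving every route on a dead Client.
const express = require('express');
const { Pool } = require('pg');

const pool = new Pool({ connectionString: process.env.DATABASE_URL });
const router = express.Router();

router.get('/api/v1/todos', (req, res) => pool.query('SELECT * FROM items ORDER BY id ASC;')
  .then(({ rows }) => res.json(rows))
  .catch(e => console.error(e) || res.status(500).json({ success: false, data: e })));

module.exports = router;
```

Because `Pool.query` resolves with the same `{ rows }` result shape, the route bodies above would be unchanged apart from the `pool` variable.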