[postgresql] How to make connection to Postgres via Node.js

I find myself trying to create a Postgres database, so I installed Postgres and initialized a cluster with initdb /usr/local/pgsql/data, then started that instance with postgres -D /usr/local/pgsql/data. Now how can I interact with this through Node? For example, what would the connection string be, or how can I find out what it is?

This question is related to postgresql node.js

The answer is


Slonik is an alternative to answers proposed by Kuberchaun and Vitaly.

Slonik implements safe connection handling: you create a connection pool, and opening and closing connections is handled for you.

import {
  createPool,
  sql
} from 'slonik';

const pool = createPool('postgres://user:password@host:port/database');

pool.connect((connection) => {
  // You are now connected to the database.
  return connection.query(sql`SELECT foo()`);
})
  .then(() => {
    // You are no longer connected to the database.
  });

postgres://user:password@host:port/database is your connection string (or more canonically a connection URI or DSN).

The benefit of this approach is that the connection is released automatically when your routine finishes, so you never accidentally leave hanging connections.
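For a one-off query you don't even need the connect callback; the pool can run it for you. A minimal sketch, assuming the same Slonik version and connection string as above:

import { createPool, sql } from 'slonik';

const pool = createPool('postgres://user:password@host:port/database');

// The pool checks out a connection, runs the query, and returns the
// connection to the pool when the promise settles.
pool.query(sql`SELECT 1 AS one`)
  .then((result) => {
    console.log(result.rows); // [ { one: 1 } ]
  })
  .catch((error) => {
    console.error(error);
  });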

Slonik has a number of other benefits as well; see its documentation for the full list.


Just to add a different option - I use Node-DBI to connect to PG, partly because it can also talk to MySQL and SQLite. Node-DBI also includes functionality to build a select statement, which is handy for doing dynamic stuff on the fly (see the sketch after the config example below).

Quick sample (using config information stored in another file):

var DBWrapper = require('node-dbi').DBWrapper;
var config = require('./config');

var dbConnectionConfig = { host: config.db.host, user: config.db.username, password: config.db.password, database: config.db.database };
var dbWrapper = new DBWrapper('pg', dbConnectionConfig);
dbWrapper.connect();

// sql_query holds whatever statement you want to run; placeholder shown here.
var sql_query = "SELECT * FROM your_table";

dbWrapper.fetchAll(sql_query, null, function (err, result) {
  if (!err) {
    console.log("Data came back from the DB.");
  } else {
    console.log("DB returned an error: %s", err);
  }

  dbWrapper.close(function (close_err) {
    if (close_err) {
      console.log("Error while disconnecting: %s", close_err);
    }
  });
});

config.js:

var config = {
  db:{
    host:"plop",
    database:"musicbrainz",
    username:"musicbrainz",
    password:"musicbrainz"
  },
}
module.exports = config;
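The select builder mentioned above looks roughly like this. This is a hedged sketch based on the Node-DBI README; the table and column names are placeholders and method names may differ between versions:

var DBWrapper = require('node-dbi').DBWrapper;
var config = require('./config');

var dbWrapper = new DBWrapper('pg', {
  host: config.db.host,
  user: config.db.username,
  password: config.db.password,
  database: config.db.database
});
dbWrapper.connect();

// Build the SELECT dynamically instead of concatenating SQL by hand.
var select = dbWrapper.getSelect()
  .from('your_table', ['id', 'name'])
  .where('name = ?', 'Ted')
  .order('id')
  .limit(10);

dbWrapper.fetchAll(select, function (err, rows) {
  if (err) {
    console.log("DB returned an error: %s", err);
  } else {
    console.log(rows);
  }
  dbWrapper.close(function () {});
});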

We can also use postgresql-easy. It is built on node-postgres and sqlutil. Note: pg_connection.js and your_handler.js are in the same folder; db.js is placed in the config folder.

pg_connection.js

const PgConnection = require('postgresql-easy');
const dbConfig = require('./config/db');
const pg = new PgConnection(dbConfig);
module.exports = pg;

./config/db.js

module.exports =  {
  database: 'your db',
  host: 'your host',
  port: 'your port',
  user: 'your user',
  password: 'your pwd',
}

your_handler.js

const pg_conctn = require('./pg_connection');

pg_conctn.getAll('your table')
  .then(res => {
    doResponseHandlingStuff();
  })
  .catch(e => {
    doErrorHandlingStuff();
  });
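The same handler written with async/await; a minimal sketch that assumes getAll returns a promise, as the .then/.catch chain above implies (the handler and table names are placeholders):

const pg_conctn = require('./pg_connection');

async function handler() {
  try {
    // 'your table' is a placeholder table name.
    const rows = await pg_conctn.getAll('your table');
    doResponseHandlingStuff(rows);
  } catch (e) {
    doErrorHandlingStuff(e);
  }
}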

Here is an example I used to connect node.js to my Postgres database.

The node.js interface that I used can be found here: https://github.com/brianc/node-postgres

var pg = require('pg');
var conString = "postgres://YourUserName:YourPassword@localhost:5432/YourDatabase";

var client = new pg.Client(conString);
client.connect();

//queries are queued and executed one after another once the connection becomes available
var x = 1000;

while (x > 0) {
    client.query("INSERT INTO junk(name, a_number) values('Ted',12)");
    client.query("INSERT INTO junk(name, a_number) values($1, $2)", ['John', x]);
    x = x - 1;
}

var query = client.query("SELECT * FROM junk");

query.on('row', function(row) {
    console.log(row);
});

//fired after last row is emitted
query.on('end', function() {
    client.end();
});



//queries can be executed either via text/parameter values passed as individual arguments
//or by passing an options object containing text, (optional) parameter values, and (optional) query name
client.query({
    name: 'insert beatle',
    text: "INSERT INTO beatles(name, height, birthday) values($1, $2, $3)",
    values: ['George', 70, new Date(1946, 2, 14)]
});

//subsequent queries with the same name will be executed without postgres re-parsing and re-planning the query
client.query({
    name: 'insert beatle',
    values: ['Paul', 63, new Date(1945, 4, 3)]
});
var query = client.query("SELECT * FROM beatles WHERE name = $1", ['john']);

//can stream row results back 1 at a time
query.on('row', function(row) {
    console.log(row);
    console.log("Beatle name: %s", row.name); //Beatle name: John
    console.log("Beatle birth year: %d", row.birthday.getYear()); //dates are returned as javascript dates
    console.log("Beatle height: %d' %d\"", Math.floor(row.height / 12), row.height % 12); //integers are returned as javascript ints
});

//fired after last row is emitted
query.on('end', function() {
    client.end();
});

UPDATE: the query.on function is now deprecated, so the code above will not work as intended. For a workaround, see: query.on is not a function
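With current node-postgres you read rows from the promise (or callback) result instead of listening for 'row'/'end' events. A minimal sketch of the promise-based equivalent, reusing the placeholder connection string and junk table from above:

const { Client } = require('pg');

const client = new Client('postgres://YourUserName:YourPassword@localhost:5432/YourDatabase');

async function run() {
  await client.connect();

  // query() returns a promise; result.rows contains all returned rows.
  const result = await client.query('SELECT * FROM junk WHERE name = $1', ['Ted']);
  for (const row of result.rows) {
    console.log(row);
  }

  await client.end();
}

run().catch(err => console.error(err));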


One solution is to use a pool of clients, like the following:

const { Pool } = require('pg');
var config = {
    user: 'foo', 
    database: 'my_db', 
    password: 'secret', 
    host: 'localhost', 
    port: 5432, 
    max: 10, // max number of clients in the pool
    idleTimeoutMillis: 30000
};
const pool = new Pool(config);
pool.on('error', function (err, client) {
    console.error('idle client error', err.message, err.stack);
});
pool.query('SELECT $1::int AS number', ['2'], function(err, res) {
    if(err) {
        return console.error('error running query', err);
    }
    console.log('number:', res.rows[0].number);
});

You can see more details on this resource.
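The same pool can also be used with promises instead of callbacks, which keeps error handling in one place. A minimal sketch using the connection settings from the config above:

const { Pool } = require('pg');

const pool = new Pool({ connectionString: 'postgres://foo:secret@localhost:5432/my_db' });

// pool.query checks out a client, runs the query, and returns the
// client to the pool automatically.
pool.query('SELECT $1::int AS number', ['2'])
  .then(res => console.log('number:', res.rows[0].number))
  .catch(err => console.error('error running query', err));

// For several statements on one client (e.g. a transaction), check a
// client out explicitly and always release it.
async function withClient() {
  const client = await pool.connect();
  try {
    const res = await client.query('SELECT NOW()');
    console.log(res.rows[0]);
  } finally {
    client.release();
  }
}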


A modern and simple approach: pg-promise:

const pgp = require('pg-promise')(/* initialization options */);

const cn = {
    host: 'localhost', // server name or IP address;
    port: 5432,
    database: 'myDatabase',
    user: 'myUser',
    password: 'myPassword'
};

// alternative:
// var cn = 'postgres://username:password@host:port/database';

const db = pgp(cn); // database instance;

// select and return a single user name from id:
db.one('SELECT name FROM users WHERE id = $1', [123])
    .then(user => {
        console.log(user.name); // print user name;
    })
    .catch(error => {
        console.log(error); // print the error;
    });

// alternative - newer ES2017 syntax with 'await':
// await db.one('SELECT name FROM users WHERE id = $1', [123]);
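The await alternative mentioned in the comment above, spelled out; a minimal sketch using the same cn settings, with the users table and id 123 taken from the example query:

const pgp = require('pg-promise')();
const db = pgp('postgres://myUser:myPassword@localhost:5432/myDatabase');

async function getUserName(id) {
  // db.one resolves with exactly one row and rejects otherwise.
  const user = await db.one('SELECT name FROM users WHERE id = $1', [id]);
  return user.name;
}

getUserName(123)
  .then(name => console.log(name))
  .catch(error => console.log(error));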

See also: How to correctly declare your database module.


Connection String

The connection string is a string of the form:

postgres://[user[:password]@][host][:port][/dbname]

(where the parts in [...] are optional)

Some examples of valid connection strings include:

postgres://localhost
postgres://localhost:5432
postgres://localhost/mydb
postgres://user@localhost
postgres://user:secret_password@localhost

If you've just started a database on your local machine, the connection string postgres://localhost will typically work, as that uses the default port number, username, and no password. If the database was started with a specific account, you might find you need to use postgres://pg@localhost or postgres://postgres@localhost

If none of these work, and you have Docker installed, another option is to run npx @databases/pg-test start. This will start a Postgres server in a Docker container and then print out the connection string for you. The pg-test databases are only intended for testing though, so you will lose all your data if your computer restarts.

Connecting in node.js

You can connect to the database and issue queries using @databases/pg:

const createPool = require('@databases/pg');
const {sql} = require('@databases/pg');

// If you're using TypeScript or Babel, you can swap
// the two `require` calls for this import statement:

// import createPool, {sql} from '@databases/pg';

// create a "pool" of connections, you can think of this as a single
// connection, the pool is just used behind the scenes to improve
// performance
const db = createPool('postgres://localhost');

// wrap code in an `async` function so we can use `await`
async function run() {

  // we can run sql by tagging it as "sql" and then passing it to db.query
  await db.query(sql`
    CREATE TABLE IF NOT EXISTS beatles (
      name TEXT NOT NULL,
      height INT NOT NULL,
      birthday DATE NOT NULL
    );
  `);

  const beatle = {
    name: 'George',
    height: 70,
    birthday: new Date(1946, 2, 14),
  };

  // If we need to pass values, we can use ${...} and they will
  // be safely & securely escaped for us
  await db.query(sql`
    INSERT INTO beatles (name, height, birthday)
    VALUES (${beatle.name}, ${beatle.height}, ${beatle.birthday});
  `);

  console.log(
    await db.query(sql`SELECT * FROM beatles;`)
  );
}

run().catch(ex => {
  // It's a good idea to always report errors using
  // `console.error` and set the process.exitCode if
  // you're calling an async function at the top level
  console.error(ex);
  process.exitCode = 1;
}).then(() => {
  // For this little demonstration, we'll dispose of the
  // connection pool when we're done, so that the process
  // exits. If you're building a web server/backend API
  // you probably never need to call this.
  return db.dispose();
});

You can find a more complete guide to querying Postgres using node.js at https://www.atdatabases.org/docs/pg