diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index d60b0cc49..04e8074b9 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -3,7 +3,7 @@
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
-FROM node:12
+FROM node:20
# Avoid warnings by switching to noninteractive
ENV DEBIAN_FRONTEND=noninteractive
@@ -16,9 +16,11 @@ ARG USERNAME=node
ARG USER_UID=1000
ARG USER_GID=$USER_UID
+RUN echo "deb http://archive.debian.org/debian stretch main" > /etc/apt/sources.list
+
# Configure apt and install packages
RUN apt-get update \
- && apt-get -y install --no-install-recommends apt-utils dialog 2>&1 \
+ && apt-get -y install --no-install-recommends dialog 2>&1 \
#
# Verify git and needed tools are installed
&& apt-get -y install git iproute2 procps \
@@ -33,6 +35,7 @@ RUN apt-get update \
&& echo "deb https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
&& apt-get update \
&& apt-get -y install --no-install-recommends yarn tmux locales postgresql \
+ && apt-get -y install libpq-dev g++ make \
#
# Install eslint globally
&& npm install -g eslint \
@@ -47,7 +50,6 @@ RUN apt-get update \
&& apt-get install -y sudo \
&& echo node ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \
&& chmod 0440 /etc/sudoers.d/$USERNAME \
- #
# Clean up
&& apt-get autoremove -y \
&& apt-get clean -y \
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 14fb67344..c8e4b7108 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -1,31 +1,16 @@
// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
{
- "name": "Node.js 12 & Postgres",
+ "name": "Node.js 20 & Postgres",
"dockerComposeFile": "docker-compose.yml",
"service": "web",
"workspaceFolder": "/workspace",
-
- // Use 'settings' to set *default* container specific settings.json values on container create.
- // You can edit these settings after create using File > Preferences > Settings > Remote.
- "settings": {
- "terminal.integrated.shell.linux": "/bin/bash"
- },
-
- // Uncomment the next line if you want start specific services in your Docker Compose config.
- // "runServices": [],
-
- // Uncomment the line below if you want to keep your containers running after VS Code shuts down.
- // "shutdownAction": "none",
-
- // Uncomment the next line to run commands after the container is created.
- // "postCreateCommand": "npm install",
-
- // Uncomment the next line to have VS Code connect as an existing non-root user in the container. See
- // https://aka.ms/vscode-remote/containers/non-root for details on adding a non-root user if none exist.
- // "remoteUser": "node",
-
// Add the IDs of extensions you want installed when the container is created in the array below.
- "extensions": [
- "dbaeumer.vscode-eslint"
- ]
-}
\ No newline at end of file
+ "customizations":{
+ "vscode": {
+ "extensions": ["dbaeumer.vscode-eslint"],
+ "settings": {
+ "terminal.integrated.shell.linux": "/bin/bash"
+ }
+ }
+ }
+}
diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml
index 05475b824..11e652008 100644
--- a/.devcontainer/docker-compose.yml
+++ b/.devcontainer/docker-compose.yml
@@ -3,7 +3,7 @@
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
-version: '3'
+version: '3.9'
services:
web:
# Uncomment the next line to use a non-root user for all processes. You can also
@@ -32,16 +32,21 @@ services:
# Overrides default command so things don't shut down after the process ends.
command: sleep infinity
- links:
+ depends_on:
- db
+ links:
+ - db:db
+
db:
- image: postgres
+ image: postgres:14-alpine
restart: unless-stopped
ports:
- 5432:5432
+ command: postgres -c password_encryption=md5
environment:
+ POSTGRES_HOST_AUTH_METHOD: trust
+ POSTGRES_INITDB_ARGS: "--auth-local=md5"
POSTGRES_PASSWORD: pass
POSTGRES_USER: user
POSTGRES_DB: data
-
diff --git a/.eslintignore b/.eslintignore
new file mode 100644
index 000000000..050c39538
--- /dev/null
+++ b/.eslintignore
@@ -0,0 +1 @@
+/packages/*/dist/
diff --git a/.eslintrc b/.eslintrc
index 4766b9889..b1999b544 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -1,7 +1,7 @@
{
- "plugins": ["prettier"],
+ "plugins": ["@typescript-eslint", "prettier"],
"parser": "@typescript-eslint/parser",
- "extends": ["plugin:prettier/recommended", "prettier/@typescript-eslint"],
+ "extends": ["eslint:recommended", "plugin:prettier/recommended", "prettier"],
"ignorePatterns": ["node_modules", "coverage", "packages/pg-protocol/dist/**/*", "packages/pg-query-stream/dist/**/*"],
"parserOptions": {
"ecmaVersion": 2017,
@@ -11,5 +11,25 @@
"node": true,
"es6": true,
"mocha": true
- }
+ },
+ "rules": {
+ "@typescript-eslint/no-unused-vars": ["error", {
+ "args": "none",
+ "varsIgnorePattern": "^_$"
+ }],
+ "no-unused-vars": ["error", {
+ "args": "none",
+ "varsIgnorePattern": "^_$"
+ }],
+ "no-var": "error",
+ "prefer-const": "error"
+ },
+ "overrides": [
+ {
+ "files": ["*.ts", "*.mts", "*.cts", "*.tsx"],
+ "rules": {
+ "no-undef": "off"
+ }
+ }
+ ]
}
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 000000000..94f480de9
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+* text=auto eol=lf
\ No newline at end of file
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 000000000..7434a61c6
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+/packages/pg-connection-string @hjr3
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ab5bef47b..e65647693 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -6,45 +6,70 @@ permissions:
contents: read
jobs:
- build:
+ lint:
+ timeout-minutes: 5
runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+ - name: Setup node
+ uses: actions/setup-node@v4
+ with:
+ node-version: 18
+ cache: yarn
+ - run: yarn install --frozen-lockfile
+ - run: yarn lint
+ build:
+ timeout-minutes: 15
+ needs: lint
services:
postgres:
- image: postgres:11
+ image: ghcr.io/railwayapp-templates/postgres-ssl
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
- POSTGRES_DB: ci_db_test
POSTGRES_HOST_AUTH_METHOD: 'md5'
+ POSTGRES_DB: ci_db_test
ports:
- 5432:5432
options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
strategy:
+ fail-fast: false
matrix:
- node: ['10', '12', '14', '16', '18']
- os: [ubuntu-latest, windows-latest, macos-latest]
- name: Node.js ${{ matrix.node }} (${{ matrix.os }})
+ node:
+ - '16'
+ - '18'
+ - '20'
+ - '22'
+ - '23'
+ os:
+ - ubuntu-latest
+ name: Node.js ${{ matrix.node }}
+ runs-on: ubuntu-latest
env:
- PGUSER: postgres
- PGHOST: localhost
- PGPASSWORD: postgres
- PGDATABASE: ci_db_test
- PGTESTNOSSL: 'true'
- SCRAM_TEST_PGUSER: scram_test
- SCRAM_TEST_PGPASSWORD: test4scram
+ PGUSER: postgres
+ PGPASSWORD: postgres
+ PGHOST: localhost
+ PGDATABASE: ci_db_test
+ PGTESTNOSSL: 'true'
+ SCRAM_TEST_PGUSER: scram_test
+ SCRAM_TEST_PGPASSWORD: test4scram
steps:
+ - name: Show OS
+ run: |
+ uname -a
- run: |
psql \
-c "SET password_encryption = 'scram-sha-256'" \
-c "CREATE ROLE scram_test LOGIN PASSWORD 'test4scram'"
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node }}
cache: yarn
- - run: yarn install
- # TODO(bmc): get ssl tests working in ci
+ - run: yarn install --frozen-lockfile
- run: yarn test
diff --git a/.gitignore b/.gitignore
index b6e058f2e..8e242c10d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,5 +7,6 @@ package-lock.json
*.swp
dist
.DS_Store
+/.eslintcache
.vscode/
manually-test-on-heroku.js
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 011bd9e01..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,100 +0,0 @@
-language: node_js
-dist: bionic
-
-before_script: |
- yarn build
- node packages/pg/script/create-test-tables.js postgresql:///
-
-env:
- - CC=clang CXX=clang++ npm_config_clang=1 PGUSER=postgres PGDATABASE=postgres
-
-node_js:
- - lts/dubnium
- - lts/erbium
- # node 13.7 seems to have changed behavior of async iterators exiting early on streams
- # if 13.8 still has this problem when it comes down I'll talk to the node team about the change
- # in the mean time...peg to 13.6
- - 13.6
- - 14
-
-addons:
- postgresql: '10'
-
-matrix:
- include:
- # Run tests/paths that require password authentication
- - node_js: lts/erbium
- env:
- - CC=clang CXX=clang++ npm_config_clang=1 PGUSER=postgres PGDATABASE=postgres PGPASSWORD=test-password SCRAM_TEST_PGUSER=scram_test SCRAM_TEST_PGPASSWORD=test4scram
- before_script: |
- sudo -u postgres sed -i \
- -e '/^local/ s/trust$/peer/' \
- -e '/^host/ s/trust$/md5/' \
- /etc/postgresql/10/main/pg_hba.conf
- sudo -u postgres psql -c "ALTER ROLE postgres PASSWORD 'test-password'; SELECT pg_reload_conf()"
- yarn build
- node packages/pg/script/create-test-tables.js postgresql:///
- sudo -u postgres -- psql \
- -c "SET password_encryption = 'scram-sha-256'" \
- -c "CREATE ROLE scram_test login password 'test4scram'"
-
- - node_js: lts/carbon
- addons:
- postgresql: '9.5'
- dist: precise
-
- # Run tests/paths with client certificate authentication
- - node_js: lts/*
- env:
- - CC=clang CXX=clang++ npm_config_clang=1 PGUSER=postgres PGDATABASE=postgres
- PGSSLMODE=verify-full
- PGSSLROOTCERT=$TRAVIS_BUILD_DIR/packages/pg/test/tls/test-server-ca.crt
- PGSSLCERT=$TRAVIS_BUILD_DIR/packages/pg/test/tls/test-client.crt
- PGSSLKEY=$TRAVIS_BUILD_DIR/packages/pg/test/tls/test-client.key
- PG_CLIENT_CERT_TEST=1
- before_script:
- - chmod go= packages/pg/test/tls/test-client.key
- - |
- sudo sed -i \
- -e '/^ssl_cert_file =/d' \
- -e '/^ssl_key_file =/d' \
- /etc/postgresql/10/main/postgresql.conf
-
- cat <<'travis ci breaks heredoc' | sudo tee -a /etc/postgresql/10/main/postgresql.conf > /dev/null
- ssl_cert_file = 'test-server.crt'
- ssl_key_file = 'test-server.key'
- ssl_ca_file = 'test-client-ca.crt'
-
- - printf 'hostssl all all %s cert\n' 127.0.0.1/32 ::1/128 | sudo tee /etc/postgresql/10/main/pg_hba.conf > /dev/null
- - sudo make -C packages/pg/test/tls install DESTDIR=/var/ramfs/postgresql/10/main
- - sudo systemctl restart postgresql@10-main
- - yarn build
- script:
- - cd packages/pg
- - node test/integration/connection-pool/tls-tests.js
- - npm install --no-save pg-native
- - node test/integration/connection-pool/tls-tests.js native
-
- # different PostgreSQL versions on Node LTS
- - node_js: lts/erbium
- addons:
- postgresql: '9.3'
- - node_js: lts/erbium
- addons:
- postgresql: '9.4'
- - node_js: lts/erbium
- addons:
- postgresql: '9.5'
- - node_js: lts/erbium
- addons:
- postgresql: '9.6'
-
- # only run lint on latest Node LTS
- - node_js: lts/*
- script: yarn lint
-
- # PostgreSQL 9.2 only works on precise
- - node_js: lts/carbon
- addons:
- postgresql: '9.2'
- dist: precise
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fff8cdf1c..24261efa4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,26 @@ For richer information consult the commit log on github with referenced pull req
We do not include break-fix version release in this file.
+## pg@8.15.0
+
+- Add support for [ESM](https://github.com/brianc/node-postgres/pull/3423) imports. CommonJS imports are still supported.
+
+## pg@8.14.0
+
+- Add support for SCRAM-SHA-256-PLUS, i.e. [channel binding](https://github.com/brianc/node-postgres/pull/3356).
+
+## pg@8.13.0
+
+- Add ability to specify a query timeout on a [per-query basis](https://github.com/brianc/node-postgres/pull/3074).
+
+## pg@8.12.0
+
+- Add `queryMode` config option to [force use of the extended query protocol](https://github.com/brianc/node-postgres/pull/3214) on queries without any parameters.
+
+## pg@8.10.0
+
+- Emit `release` event when client is returned to [the pool](https://github.com/brianc/node-postgres/pull/2845).
+
## pg@8.9.0
- Add support for [stream factory](https://github.com/brianc/node-postgres/pull/2898).
@@ -60,7 +80,7 @@ We do not include break-fix version release in this file.
### pg@8.2.0
-- Switch internal protocol parser & serializer to [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol). The change is backwards compatible but results in a significant performance improvement across the board, with some queries as much as 50% faster. This is the first work to land in an on-going performance improvment initiative I'm working on. Stay tuned as things are set to get much faster still! :rocket:
+- Switch internal protocol parser & serializer to [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol). The change is backwards compatible but results in a significant performance improvement across the board, with some queries as much as 50% faster. This is the first work to land in an on-going performance improvement initiative I'm working on. Stay tuned as things are set to get much faster still! :rocket:
### pg-cursor@2.2.0
diff --git a/LOCAL_DEV.md b/LOCAL_DEV.md
new file mode 100644
index 000000000..3bbd9b456
--- /dev/null
+++ b/LOCAL_DEV.md
@@ -0,0 +1,43 @@
+# Local development
+
+Steps to install and configure Postgres on macOS for local development.
+
+1. Install homebrew
+2. Install postgres
+ ```sh
+ brew install postgresql
+ ```
+3. Create a database
+ ```sh
+ createdb test
+ ```
+4. Create SSL certificates
+ ```sh
+ cd /opt/homebrew/var/postgresql@14
+ openssl genrsa -aes128 2048 > server.key
+ openssl rsa -in server.key -out server.key
+ chmod 400 server.key
+ openssl req -new -key server.key -days 365 -out server.crt -x509
+ cp server.crt root.crt
+ ```
+5. Update config in `/opt/homebrew/var/postgresql@14/postgresql.conf`
+
+ ```conf
+ listen_addresses = '*'
+
+ password_encryption = md5
+
+ ssl = on
+ ssl_ca_file = 'root.crt'
+ ssl_cert_file = 'server.crt'
+ ssl_crl_file = ''
+ ssl_crl_dir = ''
+ ssl_key_file = 'server.key'
+ ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
+ ssl_prefer_server_ciphers = on
+ ```
+
+6. Start Postgres server
+ ```sh
+ /opt/homebrew/opt/postgresql@14/bin/postgres -D /opt/homebrew/var/postgresql@14
+ ```
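+7. Verify node-postgres can connect over SSL. A quick sketch, assuming the `test` database
+   created above and default local credentials; `rejectUnauthorized: false` is needed because
+   the certificate generated above is self-signed:
+   ```js
+   import { Client } from 'pg'
+
+   const client = new Client({ database: 'test', ssl: { rejectUnauthorized: false } })
+   await client.connect()
+   console.log((await client.query('SHOW ssl')).rows) // [ { ssl: 'on' } ]
+   await client.end()
+   ```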
diff --git a/README.md b/README.md
index 0cf4c5e37..a680ff7b3 100644
--- a/README.md
+++ b/README.md
@@ -12,18 +12,25 @@ This repo is a monorepo which contains the core [pg](https://github.com/brianc/n
- [pg](https://github.com/brianc/node-postgres/tree/master/packages/pg)
- [pg-pool](https://github.com/brianc/node-postgres/tree/master/packages/pg-pool)
+- [pg-native](https://github.com/brianc/node-postgres/tree/master/packages/pg-native)
- [pg-cursor](https://github.com/brianc/node-postgres/tree/master/packages/pg-cursor)
- [pg-query-stream](https://github.com/brianc/node-postgres/tree/master/packages/pg-query-stream)
- [pg-connection-string](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string)
- [pg-protocol](https://github.com/brianc/node-postgres/tree/master/packages/pg-protocol)
+## Install
+
+```sh
+npm install pg
+```
+
## Documentation
Each package in this repo should have its own readme more focused on how to develop/contribute. For overall documentation on the project and the related modules managed by this repo please see:
### :star: [Documentation](https://node-postgres.com) :star:
-The source repo for the documentation is https://github.com/brianc/node-postgres-docs.
+The source repo for the documentation is available for contribution [here](https://github.com/brianc/node-postgres/tree/master/docs).
### Features
@@ -55,10 +62,16 @@ You can also follow me [@briancarlson](https://twitter.com/briancarlson) if that
## Sponsorship :two_hearts:
-node-postgres's continued development has been made possible in part by generous finanical support from [the community](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
+node-postgres's continued development has been made possible in part by generous financial support from [the community](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development.
+### Featured sponsor
+
+Special thanks to [medplum](https://medplum.com) for their generous and thoughtful support of node-postgres!
+
+
+
## Contributing
**:heart: contributions!**
@@ -74,10 +87,11 @@ If your change involves breaking backwards compatibility please please point tha
### Setting up for local development
1. Clone the repo
-2. From your workspace root run `yarn` and then `yarn lerna bootstrap`
-3. Ensure you have a PostgreSQL instance running with SSL enabled and an empty database for tests
-4. Ensure you have the proper environment variables configured for connecting to the instance
-5. Run `yarn test` to run all the tests
+2. Ensure you have `libpq-dev` installed on your system.
+3. From your workspace root run `yarn` and then `yarn lerna bootstrap`
+4. Ensure you have a PostgreSQL instance running with SSL enabled and an empty database for tests
+5. Ensure you have the proper environment variables configured for connecting to the instance
+6. Run `yarn test` to run all the tests
## Troubleshooting and FAQ
diff --git a/SPONSORS.md b/SPONSORS.md
index c16b8d3df..dfcbbd0ab 100644
--- a/SPONSORS.md
+++ b/SPONSORS.md
@@ -15,8 +15,11 @@ node-postgres is made possible by the helpful contributors from the community as
- [mpirik](https://github.com/mpirik)
- [@BLUE-DEVIL1134](https://github.com/BLUE-DEVIL1134)
- [bubble.io](https://bubble.io/)
-- GitHub[https://github.com/github]
-- loveland [https://github.com/loveland]
+- [GitHub](https://github.com/github)
+- [n8n](https://n8n.io/)
+- [loveland](https://github.com/loveland)
+- [gajus](https://github.com/gajus)
+- [thirdiron](https://github.com/thirdiron)
# Supporters
@@ -50,3 +53,4 @@ node-postgres is made possible by the helpful contributors from the community as
- [Sideline Sports](https://github.com/SidelineSports)
- [Gadget](https://github.com/gadget-inc)
- [Sentry](https://sentry.io/welcome/)
+- [devlikeapro](https://github.com/devlikeapro)
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 000000000..d19c590b9
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,20 @@
+# node-postgres docs website
+
+This is the documentation for node-postgres which is currently hosted at [https://node-postgres.com](https://node-postgres.com).
+
+## Development
+
+To run the documentation locally, you need to have [Node.js](https://nodejs.org) installed. Then, you can clone the repository and install the dependencies:
+
+```bash
+cd docs
+yarn
+```
+
+Once you've installed the deps, you can run the development server:
+
+```bash
+yarn dev
+```
+
+This will start a local server at [http://localhost:3000](http://localhost:3000) where you can view the documentation and see your changes.
diff --git a/docs/components/logo.tsx b/docs/components/logo.tsx
new file mode 100644
index 000000000..5d1175deb
--- /dev/null
+++ b/docs/components/logo.tsx
@@ -0,0 +1,11 @@
+import React from 'react'
+
+type Props = {
+ src: string
+ alt?: string
+}
+
+export function Logo(props: Props) {
+ const alt = props.alt || 'Logo'
+ return <img src={props.src} alt={alt} />
+}
diff --git a/docs/pages/announcements.mdx b/docs/pages/announcements.mdx
index 6fec81ca3..d6a17c244 100644
--- a/docs/pages/announcements.mdx
+++ b/docs/pages/announcements.mdx
@@ -47,11 +47,11 @@ new Client({
- drop support for versions of node older than 8.0
-Node@6.0 has been out of LTS for quite some time now, and I've removed it from our test matrix. `pg@8.0` _may_ still work on older versions of node, but it isn't a goal of the project anymore. Node@8.0 is actually no longer in the LTS support line, but pg will continue to test against and support 8.0 until there is a compelling reason to drop support for it. Any security vulnerability issues which come up I will back-port fixes to the `pg@7.x` line and do a release, but any other fixes or improvments will not be back ported.
+Node@6.0 has been out of LTS for quite some time now, and I've removed it from our test matrix. `pg@8.0` _may_ still work on older versions of node, but it isn't a goal of the project anymore. Node@8.0 is actually no longer in the LTS support line, but pg will continue to test against and support 8.0 until there is a compelling reason to drop support for it. Any security vulnerability issues which come up I will back-port fixes to the `pg@7.x` line and do a release, but any other fixes or improvements will not be back ported.
- prevent password from being logged accidentally
-`pg@8.0` makes the password field on the pool and client non-enumerable. This means when you do `console.log(client)` you wont have your database password printed out unintenionally. You can still do `console.log(client.password)` if you really want to see it!
+`pg@8.0` makes the password field on the pool and client non-enumerable. This means when you do `console.log(client)` you won't have your database password printed out unintentionally. You can still do `console.log(client.password)` if you really want to see it!
- make `pg.native` non-enumerable
@@ -79,7 +79,7 @@ _If you find `pg` valuable to you or your business please consider [supporting](
After a _very_ long time on my todo list I've ported the docs from my old hand-rolled webapp running on route53 + elb + ec2 + dokku (I know, I went overboard!) to [gatsby](https://www.gatsbyjs.org/) hosted on [netlify](https://www.netlify.com/) which is _so_ much easier to manage. I've released the code at [https://github.com/brianc/node-postgres-docs](https://github.com/brianc/node-postgres-docs) and invite your contributions! Let's make this documentation better together. Any time changes are merged to master on the documentation repo it will automatically deploy.
-If you see an error in the docs, big or small, use the "edit on github" button to edit the page & submit a pull request right there. I'll get a new version out ASAP with your changes! If you want to add new pages of documentation open an issue if you need guidance, and I'll help you get started.
+If you see an error in the docs, big or small, use the "edit on GitHub" button to edit the page & submit a pull request right there. I'll get a new version out ASAP with your changes! If you want to add new pages of documentation open an issue if you need guidance, and I'll help you get started.
I want to extend a special **thank you** to all the [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md) and [contributors](https://github.com/brianc/node-postgres/graphs/contributors) to the project that have helped keep me going through times of burnout or life "getting in the way." ❤️
@@ -113,10 +113,11 @@ pg@7.1.2
### Example
-To demonstrate the issue & see if you are vunerable execute the following in node:
+To demonstrate the issue & see if you are vulnerable execute the following in node:
```js
-const { Client } = require('pg')
+import pg from 'pg'
+const { Client } = pg
const client = new Client()
client.connect()
diff --git a/docs/pages/apis/_meta.json b/docs/pages/apis/_meta.json
index 0b6a193c7..67da94d93 100644
--- a/docs/pages/apis/_meta.json
+++ b/docs/pages/apis/_meta.json
@@ -3,5 +3,6 @@
"pool": "pg.Pool",
"result": "pg.Result",
"types": "pg.Types",
- "cursor": "Cursor"
+ "cursor": "Cursor",
+ "utilities": "Utilities"
}
diff --git a/docs/pages/apis/client.mdx b/docs/pages/apis/client.mdx
index 92268bed8..9ce6b111a 100644
--- a/docs/pages/apis/client.mdx
+++ b/docs/pages/apis/client.mdx
@@ -13,61 +13,46 @@ type Config = {
user?: string, // default process.env.PGUSER || process.env.USER
password?: string or function, //default process.env.PGPASSWORD
host?: string, // default process.env.PGHOST
- database?: string, // default process.env.PGDATABASE || user
port?: number, // default process.env.PGPORT
+ database?: string, // default process.env.PGDATABASE || user
connectionString?: string, // e.g. postgres://user:password@host:5432/database
ssl?: any, // passed directly to node.TLSSocket, supports all tls.connect options
types?: any, // custom type parsers
statement_timeout?: number, // number of milliseconds before a statement in query will time out, default is no timeout
query_timeout?: number, // number of milliseconds before a query call will timeout, default is no timeout
+ lock_timeout?: number, // number of milliseconds a query is allowed to wait on a lock before it's cancelled due to lock timeout
application_name?: string, // The name of the application that created this Client instance
connectionTimeoutMillis?: number, // number of milliseconds to wait for connection, default is no timeout
- idle_in_transaction_session_timeout?: number // number of milliseconds before terminating any session with an open idle transaction, default is no timeout
+ idle_in_transaction_session_timeout?: number, // number of milliseconds before terminating any session with an open idle transaction, default is no timeout
+ client_encoding?: string, // specifies the character set encoding that the database uses for sending data to the client
+ fallback_application_name?: string, // provide an application name to use if application_name is not set
+ options?: string // command-line options to be sent to the server
}
```
example to create a client with specific connection information:
```js
-const { Client } = require('pg')
+import { Client } from 'pg'
const client = new Client({
- host: 'my.database-server.com',
- port: 5334,
user: 'database-user',
password: 'secretpassword!!',
+ host: 'my.database-server.com',
+ port: 5334,
+ database: 'database-name',
})
```
## client.connect
-Calling `client.connect` with a callback:
-
```js
-const { Client } = require('pg')
+import { Client } from 'pg'
const client = new Client()
-client.connect((err) => {
- if (err) {
- console.error('connection error', err.stack)
- } else {
- console.log('connected')
- }
-})
-```
-Calling `client.connect` without a callback yields a promise:
-
-```js
-const { Client } = require('pg')
-const client = new Client()
-client
- .connect()
- .then(() => console.log('connected'))
- .catch((err) => console.error('connection error', err.stack))
+await client.connect()
```
-_note: connect returning a promise only available in pg@7.0 or above_
-
## client.query
### QueryConfig
@@ -91,80 +76,49 @@ type QueryConfig {
// custom type parsers just for this query result
types?: Types;
+
+ // when set to 'extended', forces use of the extended query protocol even for queries without parameters
+ queryMode?: string;
}
```
-### callback API
-
```ts
-client.query(text: string, values?: any[], callback?: (err: Error, result: QueryResult) => void) => void
-```
-
-**Plain text query with a callback:**
-
-```js
-const { Client } = require('pg')
-const client = new Client()
-client.connect()
-client.query('SELECT NOW()', (err, res) => {
- if (err) throw err
- console.log(res)
- client.end()
-})
+client.query(text: string, values?: any[]) => Promise
```
-**Parameterized query with a callback:**
+**Plain text query**
```js
-const { Client } = require('pg')
+import { Client } from 'pg'
const client = new Client()
-client.connect()
-client.query('SELECT $1::text as name', ['brianc'], (err, res) => {
- if (err) throw err
- console.log(res)
- client.end()
-})
-```
-### Promise API
+await client.connect()
-If you call `client.query` with query text and optional parameters but **don't** pass a callback, then you will receive a `Promise` for a query result.
+const result = await client.query('SELECT NOW()')
+console.log(result)
-```ts
-client.query(text: string, values?: any[]) => Promise
+await client.end()
```
-**Plain text query with a promise**
+**Parameterized query**
```js
-const { Client } = require('pg')
+import { Client } from 'pg'
const client = new Client()
-client.connect()
-client
- .query('SELECT NOW()')
- .then((result) => console.log(result))
- .catch((e) => console.error(e.stack))
- .then(() => client.end())
-```
-**Parameterized query with a promise**
+await client.connect()
-```js
-const { Client } = require('pg')
-const client = new Client()
-client.connect()
-client
- .query('SELECT $1::text as name', ['brianc'])
- .then((result) => console.log(result))
- .catch((e) => console.error(e.stack))
- .then(() => client.end())
+const result = await client.query('SELECT $1::text as name', ['brianc'])
+console.log(result)
+
+await client.end()
```
```ts
client.query(config: QueryConfig) => Promise
```
-**client.query with a QueryConfig and a callback**
+**client.query with a QueryConfig**
If you pass a `name` parameter to the `client.query` method, the client will create a [prepared statement](/features/queries#prepared-statements).
@@ -176,34 +130,10 @@ const query = {
rowMode: 'array',
}
-client.query(query, (err, res) => {
- if (err) {
- console.error(err.stack)
- } else {
- console.log(res.rows) // ['brianc']
- }
-})
-```
+const result = await client.query(query)
+console.log(result.rows) // ['brianc']
-**client.query with a QueryConfig and a Promise**
-
-```js
-const query = {
- name: 'get-name',
- text: 'SELECT $1::text',
- values: ['brianc'],
- rowMode: 'array',
-}
-
-// promise
-client
- .query(query)
- .then((res) => {
- console.log(res.rows) // ['brianc']
- })
- .catch((e) => {
- console.error(e.stack)
- })
+await client.end()
```
**client.query with a `Submittable`**
@@ -211,7 +141,7 @@ client
If you pass an object to `client.query` and the object has a `.submit` function on it, the client will pass it's PostgreSQL server connection to the object and delegate query dispatching to the supplied object. This is an advanced feature mostly intended for library authors. It is incidentally also currently how the callback and promise based queries above are handled internally, but this is subject to change. It is also how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work.
```js
-const Query = require('pg').Query
+import { Query } from 'pg'
const query = new Query('select $1::text as name', ['brianc'])
const result = client.query(query)
@@ -221,9 +151,11 @@ assert(query === result) // true
query.on('row', (row) => {
console.log('row!', row) // { name: 'brianc' }
})
+
query.on('end', () => {
console.log('query done')
})
+
query.on('error', (err) => {
console.error(err.stack)
})
@@ -236,25 +168,10 @@ query.on('error', (err) => {
Disconnects the client from the PostgreSQL server.
```js
-client.end((err) => {
- console.log('client has disconnected')
- if (err) {
- console.log('error during disconnection', err.stack)
- }
-})
+await client.end()
+console.log('client has disconnected')
```
-Calling end without a callback yields a promise:
-
-```js
-client
- .end()
- .then(() => console.log('client has disconnected'))
- .catch((err) => console.error('error during disconnection', err.stack))
-```
-
-_note: end returning a promise is only available in pg7.0 and above_
-
## events
### error
@@ -263,7 +180,7 @@ _note: end returning a promise is only available in pg7.0 and above_
client.on('error', (err: Error) => void) => void
```
-When the client is in the process of connecting, dispatching a query, or disconnecting it will catch and foward errors from the PostgreSQL server to the respective `client.connect` `client.query` or `client.end` callback/promise; however, the client maintains a long-lived connection to the PostgreSQL back-end and due to network partitions, back-end crashes, fail-overs, etc the client can (and over a long enough time period _will_) eventually be disconnected while it is idle. To handle this you may want to attach an error listener to a client to catch errors. Here's a contrived example:
+When the client is in the process of connecting, dispatching a query, or disconnecting, it will catch and forward errors from the PostgreSQL server to the respective `client.connect`, `client.query`, or `client.end` promise; however, the client maintains a long-lived connection to the PostgreSQL back-end and due to network partitions, back-end crashes, fail-overs, etc. the client can (and over a long enough time period _will_) eventually be disconnected while it is idle. To handle this you may want to attach an error listener to a client to catch errors. Here's a contrived example:
```js
const client = new pg.Client()
@@ -300,7 +217,7 @@ type Notification {
```js
const client = new pg.Client()
-client.connect()
+await client.connect()
client.query('LISTEN foo')
diff --git a/docs/pages/apis/cursor.mdx b/docs/pages/apis/cursor.mdx
index c4a6928c7..810bccdd3 100644
--- a/docs/pages/apis/cursor.mdx
+++ b/docs/pages/apis/cursor.mdx
@@ -1,6 +1,6 @@
---
title: pg.Cursor
-slug: /api/cursor
+slug: /apis/cursor
---
A cursor can be used to efficiently read through large result sets without loading the entire result-set into memory ahead of time. It's useful to simulate a 'streaming' style read of data, or exit early from a large result set. The cursor is passed to `client.query` and is dispatched internally in a way very similar to how normal queries are sent, but the API it presents for consuming the result set is different.
@@ -18,8 +18,8 @@ $ npm install pg pg-cursor
Instantiates a new Cursor. A cursor is an instance of `Submittable` and should be passed directly to the `client.query` method.
```js
-const { Pool } = require('pg')
-const Cursor = require('pg-cursor')
+import { Pool } from 'pg'
+import Cursor from 'pg-cursor'
const pool = new Pool()
const client = await pool.connect()
@@ -28,11 +28,9 @@ const values = [10]
const cursor = client.query(new Cursor(text, values))
-cursor.read(100, (err, rows) => {
- cursor.close(() => {
- client.release()
- })
-})
+const { rows } = await cursor.read(100)
+console.log(rows.length) // 100 (unless the table has fewer than 100 rows)
+client.release()
```
```ts
@@ -48,7 +46,7 @@ type CursorQueryConfig {
## read
-### `cursor.read(rowCount: Number, callback: (err: Error, rows: Row[], result: pg.Result) => void) => void`
+### `cursor.read(rowCount: Number) => Promise`
-Read `rowCount` rows from the cursor instance. The callback will be called when the rows are available, loaded into memory, parsed, and converted to JavaScript types.
+Read `rowCount` rows from the cursor instance. The returned promise resolves when the rows are available, loaded into memory, parsed, and converted to JavaScript types.
@@ -57,25 +55,22 @@ If the cursor has read to the end of the result sets all subsequent calls to cur
Here is an example of reading to the end of a cursor:
```js
-const { Pool } = require('pg')
-const Cursor = require('pg-cursor')
+import { Pool } from 'pg'
+import Cursor from 'pg-cursor'
const pool = new Pool()
const client = await pool.connect()
const cursor = client.query(new Cursor('select * from generate_series(0, 5)'))
-cursor.read(100, (err, rows) => {
- if (err) {
- throw err
- }
- assert(rows.length == 6)
- cursor.read(100, (err, rows) => {
- assert(rows.length == 0)
- })
-})
+
+let rows = await cursor.read(100)
+assert(rows.length == 6)
+
+rows = await cursor.read(100)
+assert(rows.length == 0)
```
## close
-### `cursor.close(callback: () => void) => void`
+### `cursor.close() => Promise`
Used to close the cursor early. If you want to stop reading from the cursor before you get all of the rows returned, call this.
diff --git a/docs/pages/apis/pool.mdx b/docs/pages/apis/pool.mdx
index 497e5253f..0a7dd1a43 100644
--- a/docs/pages/apis/pool.mdx
+++ b/docs/pages/apis/pool.mdx
@@ -29,9 +29,17 @@ type Config = {
idleTimeoutMillis?: number
// maximum number of clients the pool should contain
- // by default this is set to 10.
+ // by default this is set to 10. There is some nuance to setting the maximum size of your pool.
+ // see https://node-postgres.com/guides/pool-sizing for more information
max?: number
+ // minimum number of clients the pool should hold on to and _not_ destroy with the idleTimeoutMillis
+ // this can be useful if you get very bursty traffic and want to keep a few clients around.
+ // note: currently the pool will not automatically create and connect new clients up to the min; it will
+ // only refrain from evicting and closing idle clients so the pool does not drop below the min count.
+ // the default is 0 which disables this behavior.
+ min?: number
+
// Default behavior is the pool will keep clients open & connected to the backend
// until idleTimeoutMillis expire for each client and node will maintain a ref
// to the socket on the client, keeping the event loop alive until all clients are closed
@@ -48,7 +56,7 @@ type Config = {
example to create a new pool with configuration:
```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool({
host: 'localhost',
@@ -68,33 +76,12 @@ pool.query(text: string, values?: any[]) => Promise
```
```js
-const { Pool } = require('pg')
-
-const pool = new Pool()
-
-pool
- .query('SELECT $1::text as name', ['brianc'])
- .then((res) => console.log(res.rows[0].name)) // brianc
- .catch((err) => console.error('Error executing query', err.stack))
-```
-
-Callbacks are also supported:
-
-```ts
-pool.query(text: string, values?: any[], callback?: (err?: Error, result: pg.Result)) => void
-```
-
-```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
-pool.query('SELECT $1::text as name', ['brianc'], (err, result) => {
- if (err) {
- return console.error('Error executing query', err.stack)
- }
- console.log(result.rows[0].name) // brianc
-})
+const result = await pool.query('SELECT $1::text as name', ['brianc'])
+console.log(result.rows[0].name) // brianc
```
Notice in the example above there is no need to check out or release a client. The pool is doing the acquiring and releasing internally. I find `pool.query` to be a handy shortcut many situations and use it exclusively unless I need a transaction.
@@ -112,7 +99,7 @@ Notice in the example above there is no need to check out or release a client. T
## pool.connect
-`pool.connect(callback: (err?: Error, client?: pg.Client, release?: releaseCallback) => void) => void`
+`pool.connect() => Promise`
Acquires a client from the pool.
@@ -121,58 +108,37 @@ Acquires a client from the pool.
- If the pool is 'full' and all clients are currently checked out will wait in a FIFO queue until a client becomes available by it being released back to the pool.
```js
-const { Pool } = require('pg')
-
-const pool = new Pool()
-
-pool.connect((err, client, release) => {
- if (err) {
- return console.error('Error acquiring client', err.stack)
- }
- client.query('SELECT NOW()', (err, result) => {
- release()
- if (err) {
- return console.error('Error executing query', err.stack)
- }
- console.log(result.rows)
- })
-})
-```
-
-`pool.connect() => Promise`
-
-```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
-;(async function () {
- const client = await pool.connect()
- await client.query('SELECT NOW()')
- client.release()
-})()
+const client = await pool.connect()
+await client.query('SELECT NOW()')
+client.release()
```
### releasing clients
-`release: (err?: Error)`
+`client.release(destroy?: boolean) => void`
Client instances returned from `pool.connect` will have a `release` method which will release them from the pool.
-The `release` method on an acquired client returns it back to the pool. If you pass a truthy value in the `err` position to the callback, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client.
+The `release` method on an acquired client returns it back to the pool. If you pass a truthy value in the `destroy` parameter, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client.
```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
+
// check out a single client
const client = await pool.connect()
+
// release the client
client.release()
```
```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
assert(pool.totalCount === 0)
@@ -184,7 +150,7 @@ assert(pool.totalCount === 1)
assert(pool.idleCount === 0)
// tell the pool to destroy this client
-client.release(true)
+await client.release(true)
assert(pool.idleCount === 0)
assert(pool.totalCount === 0)
```
@@ -205,17 +171,11 @@ Calling `pool.end` will drain the pool of all active clients, disconnect them, a
```js
// again both promises and callbacks are supported:
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
-// either this:
-pool.end(() => {
- console.log('pool has ended')
-})
-
-// or this:
-pool.end().then(() => console.log('pool has ended'))
+await pool.end()
```
## properties
@@ -266,11 +226,17 @@ If the backend goes down or a network partition is encountered all the idle, con
The error listener is passed the error as the first argument and the client upon which the error occurred as the 2nd argument. The client will be automatically terminated and removed from the pool, it is only passed to the error handler in case you want to inspect it.
-You probably want to add an event listener to the pool to catch background errors errors!
+You probably want to add an event listener to the pool to catch background errors!
Just like other event emitters, if a pool emits an error event and no listeners are added node will emit an
uncaught error and potentially crash your node process.
+### release
+
+`pool.on('release', (err: Error, client: Client) => void) => void`
+
+Whenever a client is released back into the pool, the pool will emit the `release` event.
+
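+A minimal sketch of listening for the event (assuming a database reachable via the default environment variables):
+
+```js
+import { Pool } from 'pg'
+
+const pool = new Pool()
+
+pool.on('release', (err, client) => {
+  // fires each time a checked-out client is returned to the pool
+  console.log('client released')
+})
+
+await pool.query('SELECT NOW()') // checks out a client, runs the query, then releases it
+```
+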
### remove
`pool.on('remove', (client: Client) => void) => void`
diff --git a/docs/pages/apis/result.mdx b/docs/pages/apis/result.mdx
index a0ef7ddb8..da26adc70 100644
--- a/docs/pages/apis/result.mdx
+++ b/docs/pages/apis/result.mdx
@@ -1,6 +1,6 @@
---
title: pg.Result
-slug: /api/result
+slug: /apis/result
---
The `pg.Result` shape is returned for every successful query.
@@ -18,7 +18,8 @@ Every result will have a rows array. If no rows are returned the array will be e
Every result will have a fields array. This array contains the `name` and `dataTypeID` of each field in the result. These fields are ordered in the same order as the columns if you are using `arrayMode` for the query:
```js
-const { Pool } = require('pg')
+import pg from 'pg'
+const { Pool } = pg
const pool = new Pool()
@@ -37,11 +38,11 @@ await client.end()
The command type last executed: `INSERT` `UPDATE` `CREATE` `SELECT` etc.
-### `result.rowCount: int`
+### `result.rowCount: int | null`
-The number of rows processed by the last command.
+The number of rows processed by the last command. Can be `null` for commands that never affect rows, such as the `LOCK` command. More specifically, some commands, including `LOCK`, only return a command tag of the form `COMMAND`, without any `[ROWS]` field to parse; for such commands `rowCount` will be `null`.
-_note: this does not reflect the number of rows __returned__ from a query. e.g. an update statement could update many rows (so high `result.rowCount` value) but `result.rows.length` would be zero. To check for an empty query reponse on a `SELECT` query use `result.rows.length === 0`_.
+_note: this does not reflect the number of rows __returned__ from a query. e.g. an update statement could update many rows (so high `result.rowCount` value) but `result.rows.length` would be zero. To check for an empty query response on a `SELECT` query use `result.rows.length === 0`_.
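+
+For example, a sketch assuming a connected `client` and an existing `users` table:
+
+```js
+await client.query('BEGIN')
+const res = await client.query('LOCK TABLE users IN ACCESS SHARE MODE')
+console.log(res.command) // 'LOCK'
+console.log(res.rowCount) // null, because the LOCK command tag carries no row count
+await client.query('COMMIT')
+```
+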
[@sehrope](https://github.com/brianc/node-postgres/issues/2182#issuecomment-620553915) has a good explanation:
diff --git a/docs/pages/apis/types.mdx b/docs/pages/apis/types.mdx
index 55f3b0009..cc8e4c1e3 100644
--- a/docs/pages/apis/types.mdx
+++ b/docs/pages/apis/types.mdx
@@ -1,6 +1,6 @@
---
title: Types
-slug: /api/types
+slug: /apis/types
---
These docs are incomplete, for now please reference [pg-types docs](https://github.com/brianc/node-pg-types).
diff --git a/docs/pages/apis/utilities.mdx b/docs/pages/apis/utilities.mdx
new file mode 100644
index 000000000..10d9a0108
--- /dev/null
+++ b/docs/pages/apis/utilities.mdx
@@ -0,0 +1,33 @@
+---
+title: Utilities
+---
+import { Alert } from '/components/alert.tsx'
+
+## Utility Functions
+### pg.escapeIdentifier
+
+Escapes a string as a [SQL identifier](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS).
+
+```js
+import { escapeIdentifier } from 'pg';
+const escapedIdentifier = escapeIdentifier('FooIdentifier')
+console.log(escapedIdentifier) // '"FooIdentifier"'
+```
+
+<Alert>
+ **Note**: When using an identifier that is the result of this function in an operation like `CREATE TABLE ${escapeIdentifier(identifier)}`, the table that is created will be CASE SENSITIVE. If you use any capital letters in the escaped identifier, you must always refer to the created table with quotes, like `SELECT * FROM "MyCaseSensitiveTable"`; a query like `SELECT * FROM MyCaseSensitiveTable` will result in a "relation does not exist" error because PostgreSQL folds unquoted identifiers to lowercase.
+</Alert>
+
+### pg.escapeLiteral
+
+<Alert>
+ **Note**: Instead of manually escaping SQL literals, it is recommended to use parameterized queries. Refer to [parameterized queries](/features/queries#parameterized-query) and the [client.query](/apis/client#clientquery) API for more information.
+</Alert>
+
+Escapes a string as a [SQL literal](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS).
+
+```js
+import { escapeLiteral } from 'pg';
+const escapedLiteral = escapeLiteral("hello 'world'")
+console.log(escapedLiteral) // "'hello ''world'''"
+```
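+
+For comparison, a parameterized query passes the same value with no manual escaping at all (a sketch, assuming a connected `client`):
+
+```js
+const res = await client.query('SELECT $1::text AS greeting', ["hello 'world'"])
+console.log(res.rows[0].greeting) // hello 'world'
+```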
diff --git a/docs/pages/features/_meta.json b/docs/pages/features/_meta.json
index a2f5e340a..1c7980490 100644
--- a/docs/pages/features/_meta.json
+++ b/docs/pages/features/_meta.json
@@ -5,5 +5,7 @@
"transactions": "Transactions",
"types": "Data Types",
"ssl": "SSL",
- "native": "Native"
+ "native": "Native",
+ "esm": "ESM",
+ "callbacks": "Callbacks"
}
diff --git a/docs/pages/features/callbacks.mdx b/docs/pages/features/callbacks.mdx
new file mode 100644
index 000000000..8a6e2a525
--- /dev/null
+++ b/docs/pages/features/callbacks.mdx
@@ -0,0 +1,39 @@
+---
+title: Callbacks
+---
+
+## Callback Support
+
+`async` / `await` is the preferred way to write async code these days with node, but callbacks are supported in the `pg` module and the `pg-pool` module. To use them, pass a callback function as the last argument to the following methods; the callback will be invoked when the operation completes, and no promise will be returned:
+
+
+```js
+const { Pool, Client } = require('pg')
+
+// pool
+const pool = new Pool()
+// run a query on an available client
+pool.query('SELECT NOW()', (err, res) => {
+ console.log(err, res)
+})
+
+// check out a client to do something more complex like a transaction
+pool.connect((err, client, release) => {
+ client.query('SELECT NOW()', (err, res) => {
+ release()
+ console.log(err, res)
+ pool.end()
+ })
+})
+
+// single client
+const client = new Client()
+client.connect((err) => {
+ if (err) throw err
+ client.query('SELECT NOW()', (err, res) => {
+ console.log(err, res)
+ client.end()
+ })
+})
+```
diff --git a/docs/pages/features/connecting.mdx b/docs/pages/features/connecting.mdx
index b3c5ecc40..97b5c779f 100644
--- a/docs/pages/features/connecting.mdx
+++ b/docs/pages/features/connecting.mdx
@@ -7,17 +7,13 @@ title: Connecting
node-postgres uses the same [environment variables](https://www.postgresql.org/docs/9.1/static/libpq-envars.html) as libpq and psql to connect to a PostgreSQL server. Both individual clients & pools will use these environment variables. Here's a tiny program connecting node.js to the PostgreSQL server:
```js
-const { Pool, Client } = require('pg')
+import pg from 'pg'
+const { Pool, Client } = pg
// pools will use environment variables
// for connection information
const pool = new Pool()
-pool.query('SELECT NOW()', (err, res) => {
- console.log(err, res)
- pool.end()
-})
-
// you can also use async/await
const res = await pool.query('SELECT NOW()')
await pool.end()
@@ -35,10 +31,10 @@ To run the above program and specify which database to connect to we can invoke
```sh
$ PGUSER=dbuser \
- PGHOST=database.server.com \
PGPASSWORD=secretpassword \
- PGDATABASE=mydb \
+ PGHOST=database.server.com \
PGPORT=3211 \
+ PGDATABASE=mydb \
node script.js
```
@@ -47,11 +43,11 @@ This allows us to write our programs without having to specify connection inform
The default values for the environment variables used are:
```
-PGHOST=localhost
PGUSER=process.env.USER
-PGDATABASE=process.env.USER
PGPASSWORD=null
+PGHOST=localhost
PGPORT=5432
+PGDATABASE=process.env.USER
```
## Programmatic
@@ -59,41 +55,40 @@ PGPORT=5432
node-postgres also supports configuring a pool or client programmatically with connection information. Here's our same script from above modified to use programmatic (hard-coded in this case) values. This can be useful if your application already has a way to manage config values or you don't want to use environment variables.
```js
-const { Pool, Client } = require('pg')
+import pg from 'pg'
+const { Pool, Client } = pg
const pool = new Pool({
user: 'dbuser',
- host: 'database.server.com',
- database: 'mydb',
password: 'secretpassword',
+ host: 'database.server.com',
port: 3211,
+ database: 'mydb',
})
-pool.query('SELECT NOW()', (err, res) => {
- console.log(err, res)
- pool.end()
-})
+console.log(await pool.query('SELECT NOW()'))
const client = new Client({
user: 'dbuser',
- host: 'database.server.com',
- database: 'mydb',
password: 'secretpassword',
+ host: 'database.server.com',
port: 3211,
+ database: 'mydb',
})
-client.connect()
-client.query('SELECT NOW()', (err, res) => {
- console.log(err, res)
- client.end()
-})
+await client.connect()
+
+console.log(await client.query('SELECT NOW()'))
+
+await client.end()
```
Many cloud providers include alternative methods for connecting to database instances using short-lived authentication tokens. node-postgres supports dynamic passwords via a callback function, either synchronous or asynchronous. The callback function must resolve to a string.
```js
-const { Pool } = require('pg')
-const { RDS } = require('aws-sdk')
+import pg from 'pg'
+const { Pool } = pg
+import { RDS } from 'aws-sdk'
const signerOptions = {
credentials: {
@@ -106,16 +101,16 @@ const signerOptions = {
username: 'api-user',
}
-const signer = new RDS.Signer()
+const signer = new RDS.Signer(signerOptions)
-const getPassword = () => signer.getAuthToken(signerOptions)
+const getPassword = () => signer.getAuthToken()
const pool = new Pool({
+ user: signerOptions.username,
+ password: getPassword,
host: signerOptions.hostname,
port: signerOptions.port,
- user: signerOptions.username,
database: 'my-db',
- password: getPassword,
})
```
@@ -124,39 +119,39 @@ const pool = new Pool({
Connections to unix sockets can also be made. This can be useful on distros like Ubuntu, where authentication is managed via the socket connection instead of a password.
```js
-const { Client } = require('pg')
+import pg from 'pg'
+const { Client } = pg
client = new Client({
- host: '/cloudsql/myproject:zone:mydb',
user: 'username',
password: 'password',
+ host: '/cloudsql/myproject:zone:mydb',
database: 'database_name',
})
```
## Connection URI
-You can initialize both a pool and a client with a connection string URI as well. This is common in environments like Heroku where the database connection string is supplied to your application dyno through an environment variable. Connection string parsing brought to you by [pg-connection-string](https://github.com/iceddev/pg-connection-string).
+You can initialize both a pool and a client with a connection string URI as well. This is common in environments like Heroku where the database connection string is supplied to your application dyno through an environment variable. Connection string parsing brought to you by [pg-connection-string](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string).
```js
-const { Pool, Client } = require('pg')
+import pg from 'pg'
+const { Pool, Client } = pg
const connectionString = 'postgresql://dbuser:secretpassword@database.server.com:3211/mydb'
const pool = new Pool({
connectionString,
})
-pool.query('SELECT NOW()', (err, res) => {
- console.log(err, res)
- pool.end()
-})
+await pool.query('SELECT NOW()')
+await pool.end()
const client = new Client({
connectionString,
})
-client.connect()
-client.query('SELECT NOW()', (err, res) => {
- console.log(err, res)
- client.end()
-})
+await client.connect()
+
+await client.query('SELECT NOW()')
+
+await client.end()
```
diff --git a/docs/pages/features/esm.mdx b/docs/pages/features/esm.mdx
new file mode 100644
index 000000000..7aac546a7
--- /dev/null
+++ b/docs/pages/features/esm.mdx
@@ -0,0 +1,37 @@
+---
+title: ESM
+---
+
+## ESM Support
+
+As of v8.15.x node-postgres supports the __ECMAScript Module__ (ESM) format. This means you can use named `import` statements such as `import { Client } from 'pg'` instead of `require` or a default `import pg from 'pg'`.
+
+CommonJS modules are still supported. The ESM format is an opt-in feature and will not affect existing codebases that use CommonJS.
+
+The docs have been changed to show ESM usage, but in a CommonJS context you can still use the same code; you just need to change the import format.
+
+If you're using CommonJS, you can use the following code to import the `pg` module:
+
+```js
+ const pg = require('pg')
+ const { Client } = pg
+ // etc...
+```
+
+### ESM Usage
+
+If you're using ESM, you can use the following code to import the `pg` module:
+
+```js
+ import { Client } from 'pg'
+ // etc...
+```
+
+
+Previously if you were using ESM you would have to use the following code:
+
+```js
+ import pg from 'pg'
+ const { Client } = pg
+ // etc...
+```
diff --git a/docs/pages/features/native.mdx b/docs/pages/features/native.mdx
index 698d6817b..cdec4ae9b 100644
--- a/docs/pages/features/native.mdx
+++ b/docs/pages/features/native.mdx
@@ -15,10 +15,12 @@ $ npm install pg pg-native
Once `pg-native` is installed instead of requiring a `Client` or `Pool` constructor from `pg` you do the following:
```js
-const { Client, Pool } = require('pg').native
+import pg from 'pg'
+const { native } = pg
+const { Client, Pool } = native
```
-When you access the `.native` property on `require('pg')` it will automatically require the `pg-native` package and wrap it in the same API.
+When you access the `.native` property on the `pg` module it will automatically require the `pg-native` package and wrap it in the same API.
Care has been taken to normalize between the two, but there might still be edge cases where things behave subtly differently due to the nature of using libpq over handling the binary protocol directly in JavaScript, so it's recommended you chose to either use the JavaScript driver or the native bindings both in development and production. For what its worth: I use the pure JavaScript driver because the JavaScript driver is more portable (doesn't need a compiler), and the pure JavaScript driver is plenty fast.
diff --git a/docs/pages/features/pooling.mdx b/docs/pages/features/pooling.mdx
index 4719150be..ebe2844bc 100644
--- a/docs/pages/features/pooling.mdx
+++ b/docs/pages/features/pooling.mdx
@@ -19,7 +19,7 @@ The easiest and by far most common way to use node-postgres is through a connect
### Good news
-node-postgres ships with built-in connection pooling via the [pg-pool](/api/pool) module.
+node-postgres ships with built-in connection pooling via the [pg-pool](/apis/pool) module.
## Examples
@@ -28,7 +28,8 @@ The client pool allows you to have a reusable pool of clients you can check out,
### Checkout, use, and return
```js
-const { Pool } = require('pg')
+import pg from 'pg'
+const { Pool } = pg
const pool = new Pool()
@@ -39,46 +40,11 @@ pool.on('error', (err, client) => {
process.exit(-1)
})
-// callback - checkout a client
-pool.connect((err, client, done) => {
- if (err) throw err
- client.query('SELECT * FROM users WHERE id = $1', [1], (err, res) => {
- done()
-
- if (err) {
- console.log(err.stack)
- } else {
- console.log(res.rows[0])
- }
- })
-})
-
-// promise - checkout a client
-pool.connect().then((client) => {
- return client
- .query('SELECT * FROM users WHERE id = $1', [1])
- .then((res) => {
- client.release()
- console.log(res.rows[0])
- })
- .catch((err) => {
- client.release()
- console.log(err.stack)
- })
-})
+const client = await pool.connect()
+const res = await client.query('SELECT * FROM users WHERE id = $1', [1])
+console.log(res.rows[0])
-// async/await - check out a client
-;(async () => {
- const client = await pool.connect()
- try {
- const res = await client.query('SELECT * FROM users WHERE id = $1', [1])
- console.log(res.rows[0])
- } catch (err) {
- console.log(err.stack)
- } finally {
- client.release()
- }
-})()
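+// return the client to the pool when you're done with it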
+client.release()
```
@@ -95,44 +61,13 @@ pool.connect().then((client) => {
If you don't need a transaction or you just need to run a single query, the pool has a convenience method to run a query on any available client in the pool. This is the preferred way to query with node-postgres if you can, as it removes the risk of leaking a client.
```js
-const { Pool } = require('pg')
-
-const pool = new Pool()
-
-pool.query('SELECT * FROM users WHERE id = $1', [1], (err, res) => {
- if (err) {
- throw err
- }
-
- console.log('user:', res.rows[0])
-})
-```
-
-node-postgres also has built-in support for promises throughout all of its async APIs.
-
-```js
-const { Pool } = require('pg')
-
-const pool = new Pool()
+import pg from 'pg'
+const { Pool } = pg
-pool
- .query('SELECT * FROM users WHERE id = $1', [1])
- .then((res) => console.log('user:', res.rows[0]))
- .catch((err) =>
- setImmediate(() => {
- throw err
- })
- )
-```
-
-Promises allow us to use `async`/`await` in node v8.0 and above (or earlier if you're using babel).
-
-```js
-const { Pool } = require('pg')
const pool = new Pool()
-const { rows } = await pool.query('SELECT * FROM users WHERE id = $1', [1])
-console.log('user:', rows[0])
+const res = await pool.query('SELECT * FROM users WHERE id = $1', [1])
+console.log('user:', res.rows[0])
```
### Shutdown
@@ -140,7 +75,8 @@ console.log('user:', rows[0])
To shut down a pool call `pool.end()` on the pool. This will wait for all checked-out clients to be returned and then shut down all the clients and the pool timers.
```js
-const { Pool } = require('pg')
+import pg from 'pg'
+const { Pool } = pg
const pool = new Pool()
console.log('starting async query')
diff --git a/docs/pages/features/queries.mdx b/docs/pages/features/queries.mdx
index 0deef0d0d..39bcfbe1d 100644
--- a/docs/pages/features/queries.mdx
+++ b/docs/pages/features/queries.mdx
@@ -3,27 +3,14 @@ title: Queries
slug: /features/queries
---
-The api for executing queries supports both callbacks and promises. I'll provide an example for both _styles_ here. For the sake of brevity I am using the `client.query` method instead of the `pool.query` method - both methods support the same API. In fact, `pool.query` delegates directly to `client.query` internally.
+For the sake of brevity I am using the `client.query` method instead of the `pool.query` method - both methods support the same API. In fact, `pool.query` delegates directly to `client.query` internally.
## Text only
If your query has no parameters you do not need to pass them to the query method:
```js
-// callback
-client.query('SELECT NOW() as now', (err, res) => {
- if (err) {
- console.log(err.stack)
- } else {
- console.log(res.rows[0])
- }
-})
-
-// promise
-client
- .query('SELECT NOW() as now')
- .then(res => console.log(res.rows[0]))
- .catch(e => console.error(e.stack))
+await client.query('SELECT NOW() as now')
```
## Parameterized query
@@ -34,37 +21,13 @@ If you are passing parameters to your queries you will want to avoid string conc
const text = 'INSERT INTO users(name, email) VALUES($1, $2) RETURNING *'
const values = ['brianc', 'brian.m.carlson@gmail.com']
-// callback
-client.query(text, values, (err, res) => {
- if (err) {
- console.log(err.stack)
- } else {
- console.log(res.rows[0])
- // { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
- }
-})
-
-// promise
-client
- .query(text, values)
- .then(res => {
- console.log(res.rows[0])
- // { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
- })
- .catch(e => console.error(e.stack))
-
-// async/await
-try {
- const res = await client.query(text, values)
- console.log(res.rows[0])
- // { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
-} catch (err) {
- console.log(err.stack)
-}
+const res = await client.query(text, values)
+console.log(res.rows[0])
+// { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
```
- PostgreSQL does not support parameters for identifiers. If you need to have dynamic database, schema, table, or column names (e.g. in DDL statements) use pg-format package for handling escaping these values to ensure you do not have SQL injection!
+  PostgreSQL does not support parameters for identifiers. If you need dynamic database, schema, table, or column names (e.g. in DDL statements) use the [pg-format](https://www.npmjs.com/package/pg-format) package to escape these values and ensure you do not introduce SQL injection!
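+
+For example, here is a sketch using pg-format's `%I` placeholder to escape an identifier (the `users` table name is hypothetical):
+
+```js
+import format from 'pg-format'
+
+// escape the dynamic table name with %I; regular values still go through parameters
+const sql = format('SELECT * FROM %I WHERE id = $1', 'users')
+const res = await client.query(sql, [1])
+console.log(res.rows[0])
+```
+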
Parameters passed as the second argument to `query()` will be converted to raw data types using the following rules:
@@ -112,20 +75,8 @@ const query = {
values: ['brianc', 'brian.m.carlson@gmail.com'],
}
-// callback
-client.query(query, (err, res) => {
- if (err) {
- console.log(err.stack)
- } else {
- console.log(res.rows[0])
- }
-})
-
-// promise
-client
- .query(query)
- .then(res => console.log(res.rows[0]))
- .catch(e => console.error(e.stack))
+const res = await client.query(query)
+console.log(res.rows[0])
```
The query config object allows for a few more advanced scenarios:
@@ -142,20 +93,8 @@ const query = {
values: [1],
}
-// callback
-client.query(query, (err, res) => {
- if (err) {
- console.log(err.stack)
- } else {
- console.log(res.rows[0])
- }
-})
-
-// promise
-client
- .query(query)
- .then(res => console.log(res.rows[0]))
- .catch(e => console.error(e.stack))
+const res = await client.query(query)
+console.log(res.rows[0])
```
In the above example the first time the client sees a query with the name `'fetch-user'` it will send a 'parse' request to the PostgreSQL server & execute the query as normal. The second time, it will skip the 'parse' request and send the _name_ of the query to the PostgreSQL server.
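+
+As a sketch, running the same named query twice (the second execution reuses the parsed statement):
+
+```js
+// first call: the server parses the query and caches it under 'fetch-user'
+await client.query({ name: 'fetch-user', text: 'SELECT * FROM user WHERE id = $1', values: [1] })
+// second call: the client skips the parse step and executes the cached statement by name
+const res = await client.query({ name: 'fetch-user', text: 'SELECT * FROM user WHERE id = $1', values: [2] })
+console.log(res.rows[0])
+```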
@@ -177,29 +116,14 @@ const query = {
rowMode: 'array',
}
-// callback
-client.query(query, (err, res) => {
- if (err) {
- console.log(err.stack)
- } else {
- console.log(res.fields.map(field => field.name)) // ['first_name', 'last_name']
- console.log(res.rows[0]) // ['Brian', 'Carlson']
- }
-})
-
-// promise
-client
- .query(query)
- .then(res => {
- console.log(res.fields.map(field => field.name)) // ['first_name', 'last_name']
- console.log(res.rows[0]) // ['Brian', 'Carlson']
- })
- .catch(e => console.error(e.stack))
+const res = await client.query(query)
+console.log(res.fields.map(field => field.name)) // ['first_name', 'last_name']
+console.log(res.rows[0]) // ['Brian', 'Carlson']
```
### Types
-You can pass in a custom set of type parsers to use when parsing the results of a particular query. The `types` property must conform to the [Types](/api/types) API. Here is an example in which every value is returned as a string:
+You can pass in a custom set of type parsers to use when parsing the results of a particular query. The `types` property must conform to the [Types](/apis/types) API. Here is an example in which every value is returned as a string:
```js
const query = {
diff --git a/docs/pages/features/ssl.mdx b/docs/pages/features/ssl.mdx
index 0428d0549..2c5e7bd9e 100644
--- a/docs/pages/features/ssl.mdx
+++ b/docs/pages/features/ssl.mdx
@@ -25,24 +25,15 @@ const config = {
import { Client, Pool } from 'pg'
const client = new Client(config)
-client.connect(err => {
- if (err) {
- console.error('error connecting', err.stack)
- } else {
- console.log('connected')
- client.end()
- }
-})
+await client.connect()
+console.log('connected')
+await client.end()
const pool = new Pool(config)
-pool
- .connect()
- .then(client => {
- console.log('connected')
- client.release()
- })
- .catch(err => console.error('error connecting', err.stack))
- .then(() => pool.end())
+const pooledClient = await pool.connect()
+console.log('connected')
+pooledClient.release()
+await pool.end()
```
## Usage with `connectionString`
@@ -59,3 +50,17 @@ const config = {
},
}
```
+
+## Channel binding
+
+If the PostgreSQL server offers SCRAM-SHA-256-PLUS (i.e. channel binding) for TLS/SSL connections, you can enable this as follows:
+
+```js
+const client = new Client({ ...config, enableChannelBinding: true })
+```
+
+or
+
+```js
+const pool = new Pool({ ...config, enableChannelBinding: true })
+```
diff --git a/docs/pages/features/transactions.mdx b/docs/pages/features/transactions.mdx
index 408db52f8..4433bd3e4 100644
--- a/docs/pages/features/transactions.mdx
+++ b/docs/pages/features/transactions.mdx
@@ -15,16 +15,10 @@ To execute a transaction with node-postgres you simply execute `BEGIN / COMMIT /
## Examples
-### async/await
-
-Things are considerably more straightforward if you're using async/await:
-
```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
-// note: we don't try/catch this because if connecting throws an exception
-// we don't need to dispose of the client (it will be undefined)
const client = await pool.connect()
try {
@@ -43,51 +37,3 @@ try {
client.release()
}
```
-
-### callbacks
-
-node-postgres is a very old library, and still has an optional callback API. Here's an example of doing the same code above, but with callbacks:
-
-```js
-const { Pool } = require('pg')
-const pool = new Pool()
-
-pool.connect((err, client, done) => {
- const shouldAbort = (err) => {
- if (err) {
- console.error('Error in transaction', err.stack)
- client.query('ROLLBACK', (err) => {
- if (err) {
- console.error('Error rolling back client', err.stack)
- }
- // release the client back to the pool
- done()
- })
- }
- return !!err
- }
-
- client.query('BEGIN', (err) => {
- if (shouldAbort(err)) return
- const queryText = 'INSERT INTO users(name) VALUES($1) RETURNING id'
- client.query(queryText, ['brianc'], (err, res) => {
- if (shouldAbort(err)) return
-
- const insertPhotoText = 'INSERT INTO photos(user_id, photo_url) VALUES ($1, $2)'
- const insertPhotoValues = [res.rows[0].id, 's3.bucket.foo']
- client.query(insertPhotoText, insertPhotoValues, (err, res) => {
- if (shouldAbort(err)) return
-
- client.query('COMMIT', (err) => {
- if (err) {
- console.error('Error committing transaction', err.stack)
- }
- done()
- })
- })
- })
- })
-})
-```
-
-..thank goodness for `async/await` yeah?
diff --git a/docs/pages/features/types.mdx b/docs/pages/features/types.mdx
index 65c814bae..36e8b7035 100644
--- a/docs/pages/features/types.mdx
+++ b/docs/pages/features/types.mdx
@@ -4,7 +4,7 @@ title: Data Types
import { Alert } from '/components/alert.tsx'
-PostgreSQL has a rich system of supported [data types](https://www.postgresql.org/docs/9.5/static/datatype.html). node-postgres does its best to support the most common data types out of the box and supplies an extensible type parser to allow for custom type serialization and parsing.
+PostgreSQL has a rich system of supported [data types](https://www.postgresql.org/docs/current/datatype.html). node-postgres does its best to support the most common data types out of the box and supplies an extensible type parser to allow for custom type serialization and parsing.
## strings by default
@@ -21,7 +21,7 @@ console.log(result.rows[0]) // will contain the unparsed string value of each co
### uuid + json / jsonb
-There is no data type in JavaScript for a uuid/guid so node-postgres converts a uuid to a string. JavaScript has great support for JSON and node-postgres converts json/jsonb objects directly into their JavaScript object via [`JSON.parse`](https://github.com/brianc/node-pg-types/blob/master/lib/textParsers.js#L193). Likewise sending an object to the PostgreSQL server via a query from node-postgres, node-posgres will call [`JSON.stringify`](https://github.com/brianc/node-postgres/blob/e5f0e5d36a91a72dda93c74388ac890fa42b3be0/lib/utils.js#L47) on your outbound value, automatically converting it to json for the server.
+There is no data type in JavaScript for a uuid/guid so node-postgres converts a uuid to a string. JavaScript has great support for JSON and node-postgres converts json/jsonb objects directly into their JavaScript object via [`JSON.parse`](https://github.com/brianc/node-pg-types/blob/master/lib/textParsers.js#L193). Likewise, when sending an object to the PostgreSQL server via a query, node-postgres will call [`JSON.stringify`](https://github.com/brianc/node-postgres/blob/e5f0e5d36a91a72dda93c74388ac890fa42b3be0/lib/utils.js#L47) on your outbound value, automatically converting it to json for the server.
```js
const createTableText = `
diff --git a/docs/pages/guides/_meta.json b/docs/pages/guides/_meta.json
index 3889a0992..777acb4e2 100644
--- a/docs/pages/guides/_meta.json
+++ b/docs/pages/guides/_meta.json
@@ -1,5 +1,6 @@
{
"project-structure": "Suggested Code Structure",
"async-express": "Express with Async/Await",
+ "pool-sizing": "Pool Sizing",
"upgrading": "Upgrading"
}
diff --git a/docs/pages/guides/async-express.md b/docs/pages/guides/async-express.md
index 3be6d955a..a44c15289 100644
--- a/docs/pages/guides/async-express.md
+++ b/docs/pages/guides/async-express.md
@@ -22,21 +22,18 @@ That's the same structure I used in the [project structure](/guides/project-stru
My `db/index.js` file usually starts out like this:
```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
-module.exports = {
- query: (text, params) => pool.query(text, params),
-}
+export const query = (text, params) => pool.query(text, params)
```
Then I will install [express-promise-router](https://www.npmjs.com/package/express-promise-router) and use it to define my routes. Here is my `routes/user.js` file:
```js
-const Router = require('express-promise-router')
-
-const db = require('../db')
+import Router from 'express-promise-router'
+import * as db from '../db/index.js'
// create a new express-promise-router
// this has the same API as the normal express router except
@@ -44,7 +41,7 @@ const db = require('../db')
const router = new Router()
// export our router to be mounted by the parent application
-module.exports = router
+export default router
router.get('/:id', async (req, res) => {
const { id } = req.params
@@ -57,22 +54,24 @@ Then in my `routes/index.js` file I'll have something like this which mounts eac
```js
// ./routes/index.js
-const users = require('./user')
-const photos = require('./photos')
+import users from './user.js'
+import photos from './photos.js'
-module.exports = (app) => {
+const mountRoutes = (app) => {
app.use('/users', users)
app.use('/photos', photos)
// etc..
}
+
+export default mountRoutes
```
And finally in my `app.js` file where I bootstrap express I will have my `routes/index.js` file mount all my routes. The routes know they're using async functions but because of express-promise-router the main express app doesn't know and doesn't care!
```js
// ./app.js
-const express = require('express')
-const mountRoutes = require('./routes')
+import express from 'express'
+import mountRoutes from './routes/index.js'
const app = express()
mountRoutes(app)
diff --git a/docs/pages/guides/pool-sizing.md b/docs/pages/guides/pool-sizing.md
new file mode 100644
index 000000000..5c7ddaad8
--- /dev/null
+++ b/docs/pages/guides/pool-sizing.md
@@ -0,0 +1,25 @@
+---
+title: Pool Sizing
+---
+
+If you're using a [pool](/apis/pool) in an application with multiple instances of your service running (common in most cloud/container environments today), you'll need to think a bit about the `max` parameter of your pool across all services and all _instances_ of those services that connect to your Postgres server.
+
+This can get pretty complex depending on your cloud environment. Further nuance is introduced by things like pg-bouncer, RDS connection proxies, etc., which do their own connection pooling and connection multiplexing. So, it's definitely worth thinking about. Let's run through a few setups. While certainly not exhaustive, these examples hopefully prompt you to think about what's right for your setup.
+
+## Simple apps, dev mode, fixed instance counts, etc.
+
+If your app isn't running in a k8s-style environment with auto-scaling containers, lambdas, cloud functions, etc., you can do some "napkin math" for the `max` pool config. Let's assume your Postgres instance is configured to allow a maximum of 200 connections at any one time, and you know your service will run on 4 instances. You could set the `max` pool size to 50, but then if all your services are saturated waiting on database connections, you won't be able to connect to the database from any management tools or scale up your services without changing config/code to adjust the max size.
+
+In this situation, I'd probably set the `max` to 20 or 25. This leaves plenty of headroom for scaling up more instances, and realistically, if your app is starved for db connections, you probably want to take a look at your queries and make them execute faster, add caching, or do something else to reduce the load on the database. I once worked on a reporting-heavy application with a limited number of users, but each user ran 5-6 queries at a time that all took 100-200 milliseconds; in that situation, I upped the `max` to 50. Typically, though, I don't bother setting it to anything other than the default of `10`, as that's usually _fine_.
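+
+As a rough sketch of the napkin math above (the instance and connection counts are the hypothetical ones from this example):
+
+```js
+import { Pool } from 'pg'
+
+// 4 service instances x 25 connections each = 100 total,
+// leaving half of the server's 200-connection limit as headroom
+const pool = new Pool({ max: 25 })
+```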
+
+## Auto-scaling, cloud-functions, multi-tenancy, etc.
+
+If the number of instances of your services that connect to your database is more dynamic and based on things like load, auto-scaling containers, or running in cloud functions, you need to be a bit more thoughtful about what your max might be. Often in these environments, there will be another database pooling proxy in front of the database like pg-bouncer or the RDS-proxy, etc. I'm not sure how all of these function exactly, and they all have some trade-offs, but let's assume you're not using a proxy. Then I'd be pretty cautious about how large you set any individual pool. If you're running an application under pretty serious load where you need dynamic scaling or lots of lambdas spinning up and sending queries, your queries are likely fast and you should be fine setting the `max` to a low value like 10 -- or just leaving it alone, since `10` is the default.
+
+## pg-bouncer, RDS-proxy, etc.
+
+I'm not sure of all the pooling services for Postgres. I haven't used any myself. Throughout the years of working on `pg`, I've addressed issues caused by various proxies behaving differently than an actual Postgres backend. There are also gotchas with things like transactions. On the other hand, plenty of people run these with much success. In this situation, I would just recommend using some small but reasonable `max` value like the default value of `10` as it can still be helpful to keep a few TCP sockets from your services to the Postgres proxy open.
+
+## Conclusion, tl;dr
+
+It's a bit of a complicated topic and doesn't have much impact on things until you need to start scaling. At that point, your number of connections _still_ probably won't be your scaling bottleneck. It's worth thinking about a bit, but mostly I'd just leave the pool size at the default of `10` until you run into trouble: hopefully you never do!
diff --git a/docs/pages/guides/project-structure.md b/docs/pages/guides/project-structure.md
index 742451daa..5f53a4183 100644
--- a/docs/pages/guides/project-structure.md
+++ b/docs/pages/guides/project-structure.md
@@ -11,8 +11,6 @@ Whenever I am writing a project & using node-postgres I like to create a file wi
## example
-_note: I am using callbacks in this example to introduce as few concepts as possible at a time, but the same is doable with promises or async/await_
-
The location doesn't really matter - I've found it usually ends up being somewhat app specific and in line with whatever folder structure conventions you're using. For this example I'll use an express app structured like so:
```
@@ -29,14 +27,12 @@ The location doesn't really matter - I've found it usually ends up being somewha
Typically I'll start out my `db/index.js` file like so:
```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
-module.exports = {
- query: (text, params, callback) => {
- return pool.query(text, params, callback)
- },
+export const query = (text, params) => {
+ return pool.query(text, params)
}
```
@@ -45,15 +41,11 @@ That's it. But now everywhere else in my application instead of requiring `pg` d
```js
// notice here I'm requiring my database adapter file
// and not requiring node-postgres directly
-const db = require('../db')
-
-app.get('/:id', (req, res, next) => {
- db.query('SELECT * FROM users WHERE id = $1', [req.params.id], (err, result) => {
- if (err) {
- return next(err)
- }
- res.send(result.rows[0])
- })
+import * as db from '../db/index.js'
+
+app.get('/:id', async (req, res, next) => {
+ const result = await db.query('SELECT * FROM users WHERE id = $1', [req.params.id])
+ res.send(result.rows[0])
})
// ... many other routes in this file
@@ -62,19 +54,16 @@ app.get('/:id', (req, res, next) => {
Imagine we have lots of routes scattered throughout many files under our `routes/` directory. We now want to go back and log every single query that's executed, how long it took, and the number of rows it returned. If we had required node-postgres directly in every route file we'd have to go edit every single route - that would take forever & be really error prone! But thankfully we put our data access into `db/index.js`. Let's go add some logging:
```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
-module.exports = {
- query: (text, params, callback) => {
- const start = Date.now()
- return pool.query(text, params, (err, res) => {
- const duration = Date.now() - start
- console.log('executed query', { text, duration, rows: res.rowCount })
- callback(err, res)
- })
- },
+export const query = async (text, params) => {
+ const start = Date.now()
+ const res = await pool.query(text, params)
+ const duration = Date.now() - start
+ console.log('executed query', { text, duration, rows: res.rowCount })
+ return res
}
```
@@ -85,112 +74,57 @@ _note: I didn't log the query parameters. Depending on your application you migh
Now what if we need to check out a client from the pool to run several queries in a row in a transaction? We can add another method to our `db/index.js` file when we need to do this:
```js
-const { Pool } = require('pg')
+import { Pool } from 'pg'
const pool = new Pool()
-module.exports = {
- query: (text, params, callback) => {
- const start = Date.now()
- return pool.query(text, params, (err, res) => {
- const duration = Date.now() - start
- console.log('executed query', { text, duration, rows: res.rowCount })
- callback(err, res)
- })
- },
- getClient: (callback) => {
- pool.connect((err, client, done) => {
- callback(err, client, done)
- })
- },
+export const query = async (text, params) => {
+ const start = Date.now()
+ const res = await pool.query(text, params)
+ const duration = Date.now() - start
+ console.log('executed query', { text, duration, rows: res.rowCount })
+ return res
}
-```
-
-Okay. Great - the simplest thing that could possibly work. It seems like one of our routes that checks out a client to run a transaction is forgetting to call `done` in some situation! Oh no! We are leaking a client & have hundreds of these routes to go audit. Good thing we have all our client access going through this single file. Lets add some deeper diagnostic information here to help us track down where the client leak is happening.
-
-```js
-const { Pool } = require('pg')
-
-const pool = new Pool()
-module.exports = {
- query: (text, params, callback) => {
- const start = Date.now()
- return pool.query(text, params, (err, res) => {
- const duration = Date.now() - start
- console.log('executed query', { text, duration, rows: res.rowCount })
- callback(err, res)
- })
- },
- getClient: (callback) => {
- pool.connect((err, client, done) => {
- const query = client.query
-
- // monkey patch the query method to keep track of the last query executed
- client.query = (...args) => {
- client.lastQuery = args
- return query.apply(client, args)
- }
-
- // set a timeout of 5 seconds, after which we will log this client's last query
- const timeout = setTimeout(() => {
- console.error('A client has been checked out for more than 5 seconds!')
- console.error(`The last executed query on this client was: ${client.lastQuery}`)
- }, 5000)
-
- const release = (err) => {
- // call the actual 'done' method, returning this client to the pool
- done(err)
-
- // clear our timeout
- clearTimeout(timeout)
-
- // set the query method back to its old un-monkey-patched version
- client.query = query
- }
-
- callback(err, client, release)
- })
- },
+export const getClient = () => {
+ return pool.connect()
}
```
-Using async/await:
+Okay. Great - the simplest thing that could possibly work. It seems like one of our routes that checks out a client to run a transaction is forgetting to call `release` in some situation! Oh no! We are leaking a client & have hundreds of these routes to go audit. Good thing we have all our client access going through this single file. Let's add some deeper diagnostic information here to help us track down where the client leak is happening.
```js
-module.exports = {
- async query(text, params) {
- const start = Date.now()
- const res = await pool.query(text, params)
- const duration = Date.now() - start
- console.log('executed query', { text, duration, rows: res.rowCount })
- return res
- },
-
- async getClient() {
- const client = await pool.connect()
- const query = client.query
- const release = client.release
- // set a timeout of 5 seconds, after which we will log this client's last query
- const timeout = setTimeout(() => {
- console.error('A client has been checked out for more than 5 seconds!')
- console.error(`The last executed query on this client was: ${client.lastQuery}`)
- }, 5000)
- // monkey patch the query method to keep track of the last query executed
- client.query = (...args) => {
- client.lastQuery = args
- return query.apply(client, args)
- }
- client.release = () => {
- // clear our timeout
- clearTimeout(timeout)
- // set the methods back to their old un-monkey-patched version
- client.query = query
- client.release = release
- return release.apply(client)
- }
- return client
- },
+export const query = async (text, params) => {
+ const start = Date.now()
+ const res = await pool.query(text, params)
+ const duration = Date.now() - start
+ console.log('executed query', { text, duration, rows: res.rowCount })
+ return res
+}
+
+export const getClient = async () => {
+ const client = await pool.connect()
+ const query = client.query
+ const release = client.release
+ // set a timeout of 5 seconds, after which we will log this client's last query
+ const timeout = setTimeout(() => {
+ console.error('A client has been checked out for more than 5 seconds!')
+ console.error(`The last executed query on this client was: ${client.lastQuery}`)
+ }, 5000)
+ // monkey patch the query method to keep track of the last query executed
+ client.query = (...args) => {
+ client.lastQuery = args
+ return query.apply(client, args)
+ }
+ client.release = () => {
+ // clear our timeout
+ clearTimeout(timeout)
+ // set the methods back to their old un-monkey-patched version
+ client.query = query
+ client.release = release
+ return release.apply(client)
+ }
+ return client
}
```
diff --git a/docs/pages/guides/upgrading.md b/docs/pages/guides/upgrading.md
index 2a1d311a2..6a09d2ec1 100644
--- a/docs/pages/guides/upgrading.md
+++ b/docs/pages/guides/upgrading.md
@@ -5,13 +5,13 @@ slug: /guides/upgrading
# Upgrading to 8.0
-node-postgres at 8.0 introduces a breaking change to ssl-verified connections. If you connect with ssl and use
+node-postgres at 8.0 introduces a breaking change to ssl-verified connections. If you connect with ssl and use
```
const client = new Client({ ssl: true })
```
-and the server's SSL certificate is self-signed, connections will fail as of node-postgres 8.0. To keep the existing behavior, modify the invocation to
+and the server's SSL certificate is self-signed, connections will fail as of node-postgres 8.0. To keep the existing behavior, modify the invocation to
```
const client = new Client({ ssl: { rejectUnauthorized: false } })
@@ -37,7 +37,7 @@ If your application still relies on these they will be _gone_ in `pg@7.0`. In or
// old way, deprecated in 6.3.0:
// connection using global singleton
-pg.connect(function(err, client, done) {
+pg.connect(function (err, client, done) {
client.query(/* etc, etc */)
done()
})
@@ -50,10 +50,10 @@ pg.end()
// new way, available since 6.0.0:
// create a pool
-var pool = new pg.Pool()
+const pool = new pg.Pool()
// connection using created pool
-pool.connect(function(err, client, done) {
+pool.connect(function (err, client, done) {
client.query(/* etc, etc */)
done()
})
@@ -102,11 +102,12 @@ If you do **not** pass a callback `client.query` will return an instance of a `P
`client.query` has always accepted any object that has a `.submit` method on it. In this scenario the client calls `.submit` on the object, delegating execution responsibility to it. In this situation the client also **returns the instance it was passed**. This is how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work. So, if you need the event emitter functionality on your queries for some reason, it is still possible because `Query` is an instance of `Submittable`:
```js
-const { Client, Query } = require('pg')
+import pg from 'pg'
+const { Client, Query } = pg
const query = client.query(new Query('SELECT NOW()'))
-query.on('row', row => {})
-query.on('end', res => {})
-query.on('error', res => {})
+query.on('row', (row) => {})
+query.on('end', (res) => {})
+query.on('error', (res) => {})
```
`Query` is considered a public, documented part of the API of node-postgres and this form will be supported indefinitely.
diff --git a/docs/pages/index.mdx b/docs/pages/index.mdx
index 2e14116b5..5a9011b01 100644
--- a/docs/pages/index.mdx
+++ b/docs/pages/index.mdx
@@ -3,6 +3,8 @@ title: Welcome
slug: /
---
+import { Logo } from '/components/logo.tsx'
+
node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database. It has support for callbacks, promises, async/await, connection pooling, prepared statements, cursors, streaming results, C/C++ bindings, rich type parsing, and more! Just like PostgreSQL itself there are a lot of features: this documentation aims to get you up and running quickly and in the right direction. It also tries to provide guides for more advanced & edge-case topics allowing you to tap into the full power of PostgreSQL from node.js.
## Install
@@ -15,18 +17,33 @@ $ npm install pg
node-postgres continued development and support is made possible by the many [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
-If you or your company would like to sponsor node-postgres stop by [github sponsors](https://github.com/sponsors/brianc) and sign up or feel free to [email me](mailto:brian@pecanware.com) if you want to add your logo to the documentation or discuss higher tiers of sponsorship!
+Special thanks to [Medplum](https://www.medplum.com/) for sponsoring node-postgres for a whole year!
+
+
+
+
+
+If you or your company would like to sponsor node-postgres stop by [GitHub Sponsors](https://github.com/sponsors/brianc) and sign up or feel free to [email me](mailto:brian@pecanware.com) if you want to add your logo to the documentation or discuss higher tiers of sponsorship!
# Version compatibility
-node-postgres strives to be compatible with all recent lts versions of node & the most recent "stable" version. At the time of this writing node-postgres is compatible with node 8.x, 10.x, 12.x and 14.x To use node >= 14.x you will need to install `pg@8.2.x` or later due to some internal stream changes on the node 14 branch. Dropping support for an old node lts version will always be considered a breaking change in node-postgres and will be done on _major_ version number changes only, and we will try to keep support for 8.x for as long as reasonably possible.
+node-postgres strives to be compatible with all recent LTS versions of node & the most recent "stable" version. At the time of this writing node-postgres is compatible with node 18.x, 20.x, 22.x, and 24.x.
## Getting started
-This is the simplest possible way to connect, query, and disconnect with async/await:
+The simplest possible way to connect, query, and disconnect is with async/await:
```js
-const { Client } = require('pg')
+import { Client } from 'pg'
const client = new Client()
await client.connect()
@@ -35,18 +52,36 @@ console.log(res.rows[0].message) // Hello world!
await client.end()
```
-And here's the same thing with callbacks:
+### Error Handling
-```js
-const { Client } = require('pg')
+For the sake of simplicity, these docs will assume that the methods are successful. In real-world use, make sure to properly handle errors thrown by the methods. A `try/catch` block is a great way to do so:
+
+```js
+import { Client } from 'pg'
const client = new Client()
+await client.connect()
-client.connect()
+try {
+ const res = await client.query('SELECT $1::text as message', ['Hello world!'])
+ console.log(res.rows[0].message) // Hello world!
+} catch (err) {
+  console.error(err)
+} finally {
+ await client.end()
+}
+```
-client.query('SELECT $1::text as message', ['Hello world!'], (err, res) => {
- console.log(err ? err.stack : res.rows[0].message) // Hello World!
- client.end()
-})
+### Pooling
+
+In most applications you'll want to use a [connection pool](/features/pooling) to manage your connections. This is a more advanced topic, but here's a simple example of how to use it:
+
+```js
+import { Pool } from 'pg'
+const pool = new Pool()
+const res = await pool.query('SELECT $1::text as message', ['Hello world!'])
+console.log(res.rows[0].message) // Hello world!
```
Our real-world apps are almost always more complicated than that, and I urge you to read on!
+
+
diff --git a/docs/public/favicon.ico b/docs/public/favicon.ico
new file mode 100644
index 000000000..ab485092f
Binary files /dev/null and b/docs/public/favicon.ico differ
diff --git a/docs/theme.config.js b/docs/theme.config.js
index 263a26945..316ae7145 100644
--- a/docs/theme.config.js
+++ b/docs/theme.config.js
@@ -10,7 +10,6 @@ export default {
docsRepositoryBase: 'https://github.com/brianc/node-postgres/blob/master/docs', // base URL for the docs repository
titleSuffix: ' – node-postgres',
darkMode: true,
- footer: true,
navigation: {
prev: true,
next: true,
@@ -23,15 +22,48 @@ export default {
},
logo: (
<>
-
- node-postgres
+
+ node-postgres
>
),
+ chat: {
+ link: 'https://discord.gg/4nbb6zJa',
+ },
head: (
<>
-
-
+
+
+