Skip to content

Database CLI

Command-line tools for database interaction.

Connection Tools

psql

PostgreSQL's built-in command-line client.

# Basic connection
psql -h localhost -U postgres -d sartiq

# Connection string
psql "postgresql://user:pass@localhost:5432/sartiq"

# With SSL
psql "postgresql://user:pass@host:5432/db?sslmode=require"

# Environment variables
export PGHOST=localhost
export PGUSER=postgres
export PGDATABASE=sartiq
export PGPASSWORD=secret
psql

pgcli

Enhanced psql with autocomplete and syntax highlighting.

# Install
pip install pgcli
# or
brew install pgcli

# Connect
pgcli -h localhost -U postgres -d sartiq
pgcli postgresql://user:pass@localhost:5432/sartiq

# With named connection
pgcli -D local  # -D/--dsn uses the [alias_dsn] section of ~/.config/pgcli/config

Configuration (~/.config/pgcli/config):

[alias_dsn]
local = postgresql://postgres:postgres@localhost:5432/sartiq
staging = postgresql://user:pass@staging-db:5432/sartiq

psql Commands

\l              List databases
\c dbname       Connect to database
\dt             List tables
\dt+            List tables with sizes
\d tablename    Describe table
\d+ tablename   Describe with storage info
\di             List indexes
\df             List functions
\dn             List schemas
\du             List users/roles

Output Control

\x              Toggle expanded output (vertical)
\x auto         Auto-expand for wide results
\pset format    Set output format (aligned, unaligned, csv, html)
\pset border 2  Add borders to tables
\timing         Toggle query timing
\o file.txt     Output to file
\o              Output to stdout again

Query Execution

\i file.sql     Execute SQL file
\e              Edit query in $EDITOR
\g              Execute buffer
\r              Reset (clear) query buffer
\p              Print query buffer

Copy Operations

-- Export to CSV
\copy users TO '/tmp/users.csv' WITH CSV HEADER

-- Import from CSV
\copy users FROM '/tmp/users.csv' WITH CSV HEADER

-- Export query result
\copy (SELECT * FROM users WHERE active) TO '/tmp/active.csv' WITH CSV HEADER

Common SQL Operations

Data Inspection

-- Table overview
SELECT * FROM users LIMIT 10;

-- Column info
SELECT column_name, data_type, is_nullable
FROM information_schema.columns
WHERE table_name = 'users';

-- Row count (fast estimate)
SELECT reltuples::bigint FROM pg_class WHERE relname = 'users';

-- Row count (exact)
SELECT COUNT(*) FROM users;

-- Sample random rows (ORDER BY RANDOM() scans the whole table; prefer TABLESAMPLE on large tables)
SELECT * FROM users ORDER BY RANDOM() LIMIT 10;

-- Find duplicates
SELECT email, COUNT(*)
FROM users
GROUP BY email
HAVING COUNT(*) > 1;

Quick Updates

-- Update single record
UPDATE users SET status = 'active' WHERE id = 1;

-- Update with returning
UPDATE users SET status = 'inactive' WHERE last_login < '2024-01-01' RETURNING id, email;

-- Conditional update
UPDATE users
SET tier = CASE
    WHEN total_spent > 1000 THEN 'gold'
    WHEN total_spent > 100 THEN 'silver'
    ELSE 'bronze'
END;

Data Cleanup

-- Delete old records
DELETE FROM sessions WHERE expires_at < NOW();

-- Truncate (fast, resets sequences)
TRUNCATE TABLE logs RESTART IDENTITY;

-- Truncate with cascade
TRUNCATE TABLE users CASCADE;

Migration Commands

Alembic

# Show current revision
uv run alembic current

# Show history
uv run alembic history
uv run alembic history --verbose

# Create migration
uv run alembic revision --autogenerate -m "Add users table"
uv run alembic revision -m "Custom migration"

# Upgrade
uv run alembic upgrade head
uv run alembic upgrade +1
uv run alembic upgrade abc123

# Downgrade
uv run alembic downgrade -1
uv run alembic downgrade abc123
uv run alembic downgrade base

# Show SQL without executing
uv run alembic upgrade head --sql

Manual Migrations

# Apply SQL file
psql -h localhost -U postgres -d sartiq -f migration.sql

# Apply with transaction
psql -h localhost -U postgres -d sartiq -1 -f migration.sql

# Apply multiple files
cat migrations/*.sql | psql -h localhost -U postgres -d sartiq

Data Seeding

From SQL File

# Seed data
psql -h localhost -U postgres -d sartiq -f seeds/users.sql

# Seed with environment variable substitution
envsubst < seeds/template.sql | psql -h localhost -U postgres -d sartiq

From CSV

# Import CSV
psql -h localhost -U postgres -d sartiq -c "\copy users FROM 'users.csv' WITH CSV HEADER"

# Export and reimport (table copy)
pg_dump -h localhost -U postgres -t users --data-only sartiq | \
    psql -h localhost -U postgres -d sartiq_test

Python Seeder

# seeds/run.py
import asyncio
from faker import Faker
from app.database import async_session
from app.models import User

fake = Faker()

async def seed_users(count: int = 100):
    """Insert `count` fake users in a single transaction.

    Each row gets a Faker-generated name, email, and a created_at
    timestamp from the current year; everything is committed at once.
    """
    users = [
        User(
            name=fake.name(),
            email=fake.email(),
            created_at=fake.date_time_this_year(),
        )
        for _ in range(count)
    ]
    async with async_session() as db:
        db.add_all(users)
        await db.commit()

if __name__ == "__main__":
    asyncio.run(seed_users())

# Run the seeder
uv run python seeds/run.py

Database Administration

Connection Management

-- View active connections
SELECT pid, usename, application_name, client_addr, state, query_start, query
FROM pg_stat_activity
WHERE datname = 'sartiq';

-- Kill a specific connection (substitute the pid from pg_stat_activity above)
SELECT pg_terminate_backend(12345);

-- Kill idle connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = 'sartiq'
  AND state = 'idle'
  AND query_start < NOW() - INTERVAL '10 minutes';

Lock Inspection

-- View locks
SELECT
    l.pid,
    l.locktype,
    l.mode,
    l.granted,
    a.query
FROM pg_locks l
JOIN pg_stat_activity a ON l.pid = a.pid
WHERE NOT l.granted;

-- View blocking queries
SELECT
    blocked.pid AS blocked_pid,
    blocked.query AS blocked_query,
    blocking.pid AS blocking_pid,
    blocking.query AS blocking_query
FROM pg_stat_activity blocked
JOIN pg_locks blocked_locks ON blocked.pid = blocked_locks.pid
JOIN pg_locks blocking_locks ON blocked_locks.locktype = blocking_locks.locktype
    AND blocked_locks.relation = blocking_locks.relation
    AND blocked_locks.pid != blocking_locks.pid
    AND blocking_locks.granted  -- only sessions that actually hold the lock count as blockers
JOIN pg_stat_activity blocking ON blocking_locks.pid = blocking.pid
WHERE NOT blocked_locks.granted;

Performance Investigation

-- Slow queries (requires pg_stat_statements)
SELECT query, calls, mean_exec_time, total_exec_time
FROM pg_stat_statements
ORDER BY total_exec_time DESC
LIMIT 10;

-- Missing indexes (sequential scans on large tables)
SELECT relname, seq_scan, seq_tup_read, idx_scan, idx_tup_fetch
FROM pg_stat_user_tables
WHERE seq_scan > COALESCE(idx_scan, 0)  -- idx_scan is NULL when the table has no indexes
  AND seq_tup_read > 10000
ORDER BY seq_tup_read DESC;

-- Table bloat
SELECT schemaname, relname, n_dead_tup, n_live_tup,
       ROUND(100.0 * n_dead_tup / NULLIF(n_live_tup + n_dead_tup, 0), 2) AS dead_pct
FROM pg_stat_user_tables
WHERE n_dead_tup > 1000
ORDER BY n_dead_tup DESC;

Backup & Restore

Quick Backup

# SQL dump
pg_dump -h localhost -U postgres sartiq > backup.sql

# Compressed dump
pg_dump -h localhost -U postgres -Fc sartiq > backup.dump

# Specific tables
pg_dump -h localhost -U postgres -t users -t bookings sartiq > tables.sql

# Schema only
pg_dump -h localhost -U postgres --schema-only sartiq > schema.sql

# Data only
pg_dump -h localhost -U postgres --data-only sartiq > data.sql

Quick Restore

# From SQL
psql -h localhost -U postgres sartiq < backup.sql

# From compressed dump
pg_restore -h localhost -U postgres -d sartiq backup.dump

# To new database
createdb -h localhost -U postgres sartiq_restored
pg_restore -h localhost -U postgres -d sartiq_restored backup.dump

Database Copy

# Copy database locally
createdb -h localhost -U postgres -T sartiq sartiq_copy

# Copy between servers
pg_dump -h source_host -U postgres sartiq | \
    psql -h target_host -U postgres sartiq

Docker Database Access

Direct Connection

# psql in container
docker exec -it postgres psql -U postgres -d sartiq

# pgcli in container (if installed)
docker exec -it postgres pgcli -U postgres -d sartiq

# Connect from host to container
psql -h localhost -p 5432 -U postgres -d sartiq

Docker Compose

# Execute SQL
docker compose exec db psql -U postgres -d sartiq

# Run migration
docker compose exec api uv run alembic upgrade head

# Import data
docker compose exec -T db psql -U postgres -d sartiq < data.sql

# Backup
docker compose exec -T db pg_dump -U postgres sartiq > backup.sql

Useful Scripts

Create test database

#!/bin/bash
# scripts/create_test_db.sh
# Drop, recreate, migrate, and seed the test database.
# set -e: abort immediately if any step fails (don't seed a half-migrated DB);
# -u catches typo'd variables; pipefail surfaces failures inside pipelines.
set -euo pipefail

DB_NAME="sartiq_test"
DB_USER="postgres"
DB_HOST="localhost"

# Drop if exists (--if-exists makes this a no-op when the DB is absent)
dropdb -h "$DB_HOST" -U "$DB_USER" --if-exists "$DB_NAME"

# Create a fresh, empty database
createdb -h "$DB_HOST" -U "$DB_USER" "$DB_NAME"

# Run migrations against the test database only (scoped via DATABASE_URL)
DATABASE_URL="postgresql://$DB_USER@$DB_HOST/$DB_NAME" uv run alembic upgrade head

# Seed baseline fixtures
psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -f seeds/test_data.sql

echo "Test database ready: $DB_NAME"

Database reset

#!/bin/bash
# scripts/reset_db.sh
# DESTRUCTIVE: drops and recreates the main database, then re-runs migrations.
# set -e stops the script on the first failure so we never print
# "reset complete" after a failed dropdb/createdb.
set -euo pipefail

DB_NAME="sartiq"
DB_USER="postgres"
DB_HOST="localhost"

# -r: don't let backslashes in the reply get mangled
read -r -p "Reset database $DB_NAME? [y/N] " confirm
# accept both "y" and "Y"; anything else aborts
if [[ $confirm != [yY] ]]; then
    echo "Aborted"
    exit 1
fi

# Terminate every other connection so dropdb can't fail with "database is in use"
psql -h "$DB_HOST" -U "$DB_USER" -c "
    SELECT pg_terminate_backend(pid)
    FROM pg_stat_activity
    WHERE datname = '$DB_NAME' AND pid != pg_backend_pid();
"

# Drop and recreate
dropdb -h "$DB_HOST" -U "$DB_USER" "$DB_NAME"
createdb -h "$DB_HOST" -U "$DB_USER" "$DB_NAME"

# Migrate back up to head
uv run alembic upgrade head

echo "Database reset complete"

Shell Aliases

# Add to .zshrc or .bashrc
alias pglocal="pgcli -h localhost -U postgres -d sartiq"
alias pgstaging="pgcli 'postgresql://user:pass@staging:5432/sartiq'"

alias dbmigrate="uv run alembic upgrade head"
alias dbrollback="uv run alembic downgrade -1"
alias dbnew="uv run alembic revision --autogenerate -m"

alias dbdump="pg_dump -h localhost -U postgres -Fc sartiq"
alias dbrestore="pg_restore -h localhost -U postgres -d sartiq"