Database & Data Tool CLI Cheat Sheet
psql (PostgreSQL)
# Connect
psql -h localhost -U myuser -d mydb
psql "postgresql://user:pass@host:5432/dbname"
# Common flags
psql -c "SELECT COUNT(*) FROM users"  # Run single query
psql -f script.sql # Run SQL file
psql -A -t -c "SELECT id FROM users"  # Unaligned, tuples-only (for scripting)
# Inside psql
\dt                 # List tables
\d+ table_name      # Describe table with details
\dn                 # List schemas
\du                 # List users/roles
\l                  # List databases
\timing on          # Show query execution time
\x                  # Toggle expanded display
\copy table TO 'file.csv' CSV HEADER  # Export to CSV
\i script.sql       # Execute SQL file
mysql
# Connect
mysql -h localhost -u root -p mydb
# Common flags
mysql -e "SELECT COUNT(*) FROM users" mydb # Run single query
mysql < script.sql  # Run SQL file
# Inside mysql
SHOW DATABASES;
SHOW TABLES;
DESCRIBE table_name;
SHOW CREATE TABLE table_name;
SHOW PROCESSLIST;  # Active connections
DuckDB CLI
# Interactive mode
duckdb warehouse.db
# Run query directly
duckdb -c "SELECT * FROM read_parquet('data.parquet') LIMIT 10"
# Query CSV/Parquet without a database
duckdb -c "SELECT COUNT(*) FROM read_csv_auto('data.csv')"
duckdb -c "DESCRIBE SELECT * FROM read_parquet('data.parquet')"
# Export results
duckdb -c "COPY (SELECT * FROM t) TO 'output.csv' (HEADER, DELIMITER ',')"
bq (BigQuery CLI)
# Run query
bq query --use_legacy_sql=false 'SELECT COUNT(*) FROM dataset.table'
# List datasets/tables
bq ls
bq ls dataset_name
# Show table schema
bq show --schema --format=prettyjson dataset.table
# Load data
bq load --source_format=PARQUET dataset.table gs://bucket/data.parquet
# Export
bq extract dataset.table gs://bucket/export.csv
dbt
# Setup
dbt init my_project # Create new project
dbt debug  # Test connection
# Run models
dbt run # Run all models
dbt run --select model_name # Run specific model
dbt run --select +model_name # Model + upstream dependencies
dbt run --select model_name+ # Model + downstream dependencies
dbt run --select tag:daily  # Run models with tag
# Testing
dbt test  # Run all tests
dbt test --select model_name # Test specific model
dbt source freshness  # Check source freshness
# Documentation
dbt docs generate # Generate docs
dbt docs serve  # Serve docs locally
# Other
dbt seed # Load CSV files from seeds/
dbt snapshot # Run snapshots (SCD Type 2)
dbt clean # Remove compiled files
dbt compile # Compile SQL without running
Useful Flags
dbt run --full-refresh # Rebuild incremental models
dbt run --vars '{"date": "2024-01-01"}'  # Pass variables
dbt run --target prod # Run against production
dbt run --threads 8 # Parallel execution
dbt run --fail-fast # Stop on first error
Selection Syntax
# By name
dbt run --select my_model
# By path
dbt run --select models/marts/
# By tag
dbt run --select tag:nightly
# Graph operators
dbt run --select +my_model # Model + all upstream
dbt run --select my_model+ # Model + all downstream
dbt run --select +my_model+ # Model + all upstream + downstream
dbt run --select 1+my_model  # Model + 1 level upstream
# Exclude
dbt run --exclude staging.*
Docker for Data Infrastructure
Common Data Services
# PostgreSQL
docker run -d --name postgres \
-e POSTGRES_PASSWORD=secret \
-p 5432:5432 \
postgres:16
# MySQL
docker run -d --name mysql \
-e MYSQL_ROOT_PASSWORD=secret \
-p 3306:3306 \
mysql:8
# Redis
docker run -d --name redis \
-p 6379:6379 \
redis:7
# MinIO (S3-compatible storage)
docker run -d --name minio \
-p 9000:9000 -p 9001:9001 \
-e MINIO_ROOT_USER=admin \
-e MINIO_ROOT_PASSWORD=password \
minio/minio server /data --console-address ":9001"