customer-portal/customer_portal/migrations/005_text_to_jsonb.py

#!/usr/bin/env python3
"""Migration 005: Convert custom_fields from TEXT to JSONB.

Sprint 12: PostgreSQL JSONB enables:
- Native JSON operators (@>, ?, ?|, ?&, etc.)
- GIN index for fast searching
- Automatic JSON validation

Usage:
    python -m customer_portal.migrations.005_text_to_jsonb
    python -m customer_portal.migrations.005_text_to_jsonb --rollback

The migration:
1. Converts existing TEXT JSON to JSONB
2. Creates GIN index for searching
"""
import os
import sys

from sqlalchemy import create_engine, text

# Add parent directory to path
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(__file__))))


def get_database_url() -> str:
    """Get database URL from environment or default."""
    return os.environ.get(
        "DATABASE_URL", "postgresql://portal:portal@localhost:5433/customer_portal"
    )


def run_migration():
    """Convert custom_fields from TEXT to JSONB."""
    engine = create_engine(get_database_url())
    with engine.connect() as conn:
        # Check current column type
        result = conn.execute(
            text(
                """
                SELECT data_type
                FROM information_schema.columns
                WHERE table_name = 'customers'
                  AND column_name = 'custom_fields'
                """
            )
        )
        row = result.fetchone()
        if not row:
            print("Column 'custom_fields' not found in 'customers' table")
            return

        current_type = row[0]
        print(f"Current column type: {current_type}")

        if current_type == "jsonb":
            print("Column is already JSONB - skipping conversion")
        else:
            print("Converting TEXT to JSONB...")

            # Step 1: Drop the default first (can't cast default automatically)
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ALTER COLUMN custom_fields DROP DEFAULT
                    """
                )
            )

            # Step 2: Update NULL/empty values to valid JSON
            conn.execute(
                text(
                    """
                    UPDATE customers
                    SET custom_fields = '{}'
                    WHERE custom_fields IS NULL OR custom_fields = ''
                    """
                )
            )
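            # Caveat: the ::jsonb cast in Step 3 raises an error (aborting the whole
            # transaction) if any remaining row holds text that is not valid JSON;
            # such rows have to be cleaned up manually before rerunning the migration.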
            # Step 3: Convert TEXT to JSONB
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ALTER COLUMN custom_fields
                    TYPE JSONB
                    USING custom_fields::jsonb
                    """
                )
            )

            # Step 4: Set new default and NOT NULL
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ALTER COLUMN custom_fields
                    SET DEFAULT '{}'::jsonb
                    """
                )
            )
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ALTER COLUMN custom_fields
                    SET NOT NULL
                    """
                )
            )
            print("Column converted to JSONB")

        # Check if GIN index exists
        result = conn.execute(
            text(
                """
                SELECT indexname
                FROM pg_indexes
                WHERE tablename = 'customers'
                  AND indexname = 'ix_customers_custom_fields'
                """
            )
        )
        if result.fetchone():
            print("GIN index already exists")
        else:
            print("Creating GIN index...")
            conn.execute(
                text(
                    """
                    CREATE INDEX ix_customers_custom_fields
                    ON customers
                    USING GIN (custom_fields)
                    """
                )
            )
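            # The default jsonb_ops GIN opclass indexes key-existence (?, ?|, ?&) and
            # containment (@>) operators; jsonb_path_ops would build a smaller index
            # but only accelerates @> containment queries.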
print("GIN index created")
conn.commit()
print("\nMigration completed successfully!")
# Show example queries
print("\n--- Example JSONB Queries ---")
print("-- Find customers from Wien:")
print("SELECT * FROM customers WHERE custom_fields->>'ort' = 'Wien';")
print("")
print("-- Find customers with phone number:")
print("SELECT * FROM customers WHERE custom_fields ? 'telefon';")
print("")
print("-- Find customers with specific field value (uses GIN index):")
print('SELECT * FROM customers WHERE custom_fields @> \'{"ort": "Wien"}\';')
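        # Illustrative only: the ?| operator from the docstring matches rows that have
        # any of the listed keys; 'mobil' is an assumed example key, not a known field.
        print("")
        print("-- Find customers with any of several keys (?| operator):")
        print("SELECT * FROM customers WHERE custom_fields ?| ARRAY['telefon', 'mobil'];")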


def rollback():
    """Rollback: Convert JSONB back to TEXT."""
    engine = create_engine(get_database_url())
    with engine.connect() as conn:
        print("Rolling back: Converting JSONB to TEXT...")

        # Drop GIN index first
        conn.execute(
            text(
                """
                DROP INDEX IF EXISTS ix_customers_custom_fields
                """
            )
        )

        # Convert JSONB to TEXT
        conn.execute(
            text(
                """
                ALTER TABLE customers
                ALTER COLUMN custom_fields
                TYPE TEXT
                USING custom_fields::TEXT
                """
            )
        )
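        # The TEXT default dropped by the forward migration and the NOT NULL constraint
        # are not restored here; doing so would require knowing the pre-migration schema.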
        conn.commit()
        print("Rollback completed")


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "--rollback":
        rollback()
    else:
        run_migration()