Initial commit - Customer Portal for Coolify
103
customer_portal/migrations/001_add_custom_fields.py
Executable file
@@ -0,0 +1,103 @@
"""Migration: Add custom_fields column to customers table.

Sprint 6.6: Enable dynamic WordPress field synchronization.

This migration adds a TEXT column for storing JSON-encoded custom fields
from WordPress booking forms, enabling automatic synchronization without
schema changes.

Usage:
    python -m customer_portal.migrations.001_add_custom_fields
"""

import os
import sys

from sqlalchemy import create_engine, inspect, text


def get_database_url() -> str:
    """Get database URL from environment."""
    return os.getenv(
        "DATABASE_URL",
        os.getenv(
            "SQLALCHEMY_DATABASE_URI",
            "sqlite:///customer_portal.db",
        ),
    )


def migrate():
    """Add custom_fields column to customers table."""
    database_url = get_database_url()
    engine = create_engine(database_url)

    inspector = inspect(engine)
    columns = [col["name"] for col in inspector.get_columns("customers")]

    if "custom_fields" in columns:
        print("Column 'custom_fields' already exists. Skipping.")
        return True

    print("Adding 'custom_fields' column to customers table...")

    # Detect database type for appropriate SQL
    dialect = engine.dialect.name

    with engine.begin() as conn:
        if dialect in {"sqlite", "postgresql"}:
            conn.execute(
                text("ALTER TABLE customers ADD COLUMN custom_fields TEXT DEFAULT '{}'")
            )
        elif dialect == "mysql":
            conn.execute(text("ALTER TABLE customers ADD COLUMN custom_fields TEXT"))
            conn.execute(
                text(
                    "UPDATE customers SET custom_fields = '{}' WHERE custom_fields IS NULL"
                )
            )
        else:
            # Generic SQL
            conn.execute(
                text("ALTER TABLE customers ADD COLUMN custom_fields TEXT DEFAULT '{}'")
            )

    print("Migration completed successfully.")
    return True


def rollback():
    """Remove custom_fields column (if needed)."""
    database_url = get_database_url()
    engine = create_engine(database_url)

    inspector = inspect(engine)
    columns = [col["name"] for col in inspector.get_columns("customers")]

    if "custom_fields" not in columns:
        print("Column 'custom_fields' does not exist. Skipping rollback.")
        return True

    print("WARNING: Rolling back will DELETE all custom field data!")

    dialect = engine.dialect.name

    with engine.begin() as conn:
        if dialect == "sqlite":
            # SQLite (before 3.35) doesn't support DROP COLUMN directly
            print("SQLite does not support DROP COLUMN. Manual migration required.")
            return False
        else:
            conn.execute(text("ALTER TABLE customers DROP COLUMN custom_fields"))

    print("Rollback completed.")
    return True


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "--rollback":
        success = rollback()
    else:
        success = migrate()

    sys.exit(0 if success else 1)
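The TEXT column stores JSON as an opaque string, so the application is responsible for encoding and decoding on every read and write. A minimal sketch of that round-trip (the helper itself is hypothetical, not part of this commit; table and column names are taken from the migration above):

import json

from sqlalchemy import create_engine, text


def set_custom_field(engine, customer_id: int, key: str, value: str) -> None:
    """Hypothetical helper: merge one key into a customer's JSON-encoded custom_fields."""
    with engine.begin() as conn:
        row = conn.execute(
            text("SELECT custom_fields FROM customers WHERE id = :id"),
            {"id": customer_id},
        ).fetchone()
        # Fall back to an empty dict for NULL or empty strings
        fields = json.loads(row[0]) if row and row[0] else {}
        fields[key] = value
        conn.execute(
            text("UPDATE customers SET custom_fields = :cf WHERE id = :id"),
            {"cf": json.dumps(fields, ensure_ascii=False), "id": customer_id},
        )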
101
customer_portal/migrations/002_add_admin_and_settings.py
Executable file
@@ -0,0 +1,101 @@
"""Add admin_users table and portal_settings table.

Run with:
    docker exec customer_portal python -m customer_portal.migrations.002_add_admin_and_settings
"""

import os

from sqlalchemy import create_engine, text


def run_migration():
    """Create admin_users and portal_settings tables."""
    database_url = os.environ.get(
        "DATABASE_URL", "postgresql://portal:portal@localhost:5432/customer_portal"
    )

    engine = create_engine(database_url)

    with engine.connect() as conn:
        # Check if admin_users table exists
        result = conn.execute(
            text(
                """
                SELECT table_name
                FROM information_schema.tables
                WHERE table_name = 'admin_users'
                """
            )
        )
        if not result.fetchone():
            print("Creating admin_users table...")
            conn.execute(
                text(
                    """
                    CREATE TABLE admin_users (
                        id SERIAL PRIMARY KEY,
                        username VARCHAR(100) UNIQUE NOT NULL,
                        password_hash VARCHAR(255) NOT NULL,
                        name VARCHAR(255) NOT NULL,
                        email VARCHAR(255),
                        is_active BOOLEAN DEFAULT TRUE,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        last_login_at TIMESTAMP
                    )
                    """
                )
            )
            conn.execute(
                text(
                    """
                    CREATE INDEX ix_admin_users_username ON admin_users(username)
                    """
                )
            )
            conn.commit()
            print("Table admin_users created successfully.")
        else:
            print("Table admin_users already exists.")

        # Check if portal_settings table exists
        result = conn.execute(
            text(
                """
                SELECT table_name
                FROM information_schema.tables
                WHERE table_name = 'portal_settings'
                """
            )
        )
        if not result.fetchone():
            print("Creating portal_settings table...")
            conn.execute(
                text(
                    """
                    CREATE TABLE portal_settings (
                        id SERIAL PRIMARY KEY,
                        key VARCHAR(100) UNIQUE NOT NULL,
                        value TEXT NOT NULL DEFAULT '{}',
                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                    )
                    """
                )
            )
            conn.execute(
                text(
                    """
                    CREATE INDEX ix_portal_settings_key ON portal_settings(key)
                    """
                )
            )
            conn.commit()
            print("Table portal_settings created successfully.")
        else:
            print("Table portal_settings already exists.")

    print("Migration completed successfully!")


if __name__ == "__main__":
    run_migration()
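The migration creates admin_users but seeds no account. A sketch of inserting a first admin row (assumes werkzeug is available for password hashing; any hasher the portal actually uses would do, and the credentials here are placeholders):

import os

from sqlalchemy import create_engine, text
from werkzeug.security import generate_password_hash  # assumption: werkzeug is installed

engine = create_engine(os.environ["DATABASE_URL"])
with engine.begin() as conn:
    # ON CONFLICT keys off the UNIQUE constraint on username, so re-runs are no-ops
    conn.execute(
        text(
            "INSERT INTO admin_users (username, password_hash, name) "
            "VALUES (:u, :p, :n) ON CONFLICT (username) DO NOTHING"
        ),
        {"u": "admin", "p": generate_password_hash("change-me"), "n": "Portal Admin"},
    )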
75
customer_portal/migrations/003_add_email_preferences.py
Executable file
@@ -0,0 +1,75 @@
"""Add email preference columns to customers.

Run with:
    docker exec customer_portal python -m customer_portal.migrations.003_add_email_preferences
"""

import os

from sqlalchemy import create_engine, text


def run_migration():
    """Add email_invoices and email_marketing columns."""
    database_url = os.environ.get(
        "DATABASE_URL", "postgresql://portal:portal@localhost:5432/customer_portal"
    )

    engine = create_engine(database_url)

    with engine.connect() as conn:
        # Check and add email_invoices column
        result = conn.execute(
            text(
                """
                SELECT column_name
                FROM information_schema.columns
                WHERE table_name = 'customers' AND column_name = 'email_invoices'
                """
            )
        )
        if not result.fetchone():
            print("Adding email_invoices column...")
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ADD COLUMN email_invoices BOOLEAN DEFAULT TRUE
                    """
                )
            )
            conn.commit()
            print("Column email_invoices added.")
        else:
            print("Column email_invoices already exists.")

        # Check and add email_marketing column
        result = conn.execute(
            text(
                """
                SELECT column_name
                FROM information_schema.columns
                WHERE table_name = 'customers' AND column_name = 'email_marketing'
                """
            )
        )
        if not result.fetchone():
            print("Adding email_marketing column...")
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ADD COLUMN email_marketing BOOLEAN DEFAULT FALSE
                    """
                )
            )
            conn.commit()
            print("Column email_marketing added.")
        else:
            print("Column email_marketing already exists.")

    print("Migration completed!")


if __name__ == "__main__":
    run_migration()
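Because PostgreSQL applies the DEFAULT to existing rows on ADD COLUMN, every current customer ends up opted in to invoices and opted out of marketing. A minimal sketch of consuming the new flags (the connection URL mirrors the migration's default):

import os

from sqlalchemy import create_engine, text

engine = create_engine(
    os.environ.get("DATABASE_URL", "postgresql://portal:portal@localhost:5432/customer_portal")
)
with engine.connect() as conn:
    # Addresses that may receive marketing mail
    recipients = [
        row[0]
        for row in conn.execute(
            text("SELECT email FROM customers WHERE email_marketing = TRUE")
        )
    ]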
259
customer_portal/migrations/004_consolidate_customer_fields.py
Executable file
@@ -0,0 +1,259 @@
"""Consolidate fixed customer fields into custom_fields JSON.

Sprint 12 Phase 1: Move name, phone, address fields into custom_fields.
This eliminates redundancy between fixed columns and flexible JSON storage.

Run with:
    docker exec customer_portal python -m customer_portal.migrations.004_consolidate_customer_fields

Rollback with:
    docker exec customer_portal python -m customer_portal.migrations.004_consolidate_customer_fields --rollback
"""

import json
import os
import sys

from sqlalchemy import create_engine, text


def get_engine():
    """Get database engine."""
    database_url = os.environ.get(
        "DATABASE_URL", "postgresql://portal:portal@localhost:5432/customer_portal"
    )
    return create_engine(database_url)


def run_migration():
    """Move fixed field values into custom_fields JSON."""
    engine = get_engine()

    print("Sprint 12 Phase 1: Consolidating customer fields...")
    print("=" * 60)

    with engine.connect() as conn:
        # Get all customers
        result = conn.execute(
            text(
                """
                SELECT id, email, name, phone, address_street, address_city, address_zip, custom_fields
                FROM customers
                ORDER BY id
                """
            )
        )
        customers = result.fetchall()

        migrated = 0
        skipped = 0

        for row in customers:
            customer_id = row[0]
            email = row[1]
            name = row[2]
            phone = row[3]
            address_street = row[4]
            address_city = row[5]
            address_zip = row[6]
            custom_fields_raw = row[7]

            # Parse existing custom_fields
            try:
                custom_fields = (
                    json.loads(custom_fields_raw) if custom_fields_raw else {}
                )
            except (json.JSONDecodeError, TypeError):
                custom_fields = {}

            # Track changes
            changes = []

            # Migrate name (only if not already in custom_fields)
            if name and "name" not in custom_fields:
                custom_fields["name"] = name
                changes.append(f"name={name}")

            # Migrate phone (only if not already in custom_fields)
            if phone and "phone" not in custom_fields:
                custom_fields["phone"] = phone
                changes.append(f"phone={phone}")

            # Migrate address_street (only if not already in custom_fields)
            if address_street and "address_street" not in custom_fields:
                custom_fields["address_street"] = address_street
                changes.append(f"address_street={address_street[:30]}...")

            # Migrate address_city (only if not already in custom_fields)
            if address_city and "address_city" not in custom_fields:
                custom_fields["address_city"] = address_city
                changes.append(f"address_city={address_city[:30]}...")

            # Migrate address_zip (only if not already in custom_fields)
            if address_zip and "address_zip" not in custom_fields:
                custom_fields["address_zip"] = address_zip
                changes.append(f"address_zip={address_zip}")

            if changes:
                # Update customer
                new_custom_fields = json.dumps(custom_fields, ensure_ascii=False)
                conn.execute(
                    text(
                        """
                        UPDATE customers
                        SET custom_fields = :custom_fields
                        WHERE id = :id
                        """
                    ),
                    {"custom_fields": new_custom_fields, "id": customer_id},
                )
                migrated += 1
                print(f"  [OK] Customer {customer_id} ({email}): {', '.join(changes)}")
            else:
                skipped += 1

        conn.commit()

    print("=" * 60)
    print(
        f"Migration completed: {migrated} migrated, {skipped} skipped (already consolidated)"
    )


def run_rollback():
    """Restore fixed fields from custom_fields (for rollback)."""
    engine = get_engine()

    print("Rolling back Sprint 12 Phase 1...")
    print("=" * 60)

    with engine.connect() as conn:
        # Get all customers
        result = conn.execute(
            text(
                """
                SELECT id, email, name, phone, address_street, address_city, address_zip, custom_fields
                FROM customers
                ORDER BY id
                """
            )
        )
        customers = result.fetchall()

        rolled_back = 0

        for row in customers:
            customer_id = row[0]
            email = row[1]
            current_name = row[2]
            current_phone = row[3]
            current_street = row[4]
            current_city = row[5]
            current_zip = row[6]
            custom_fields_raw = row[7]

            # Parse custom_fields
            try:
                custom_fields = (
                    json.loads(custom_fields_raw) if custom_fields_raw else {}
                )
            except (json.JSONDecodeError, TypeError):
                continue

            # Check if we have migrated data in custom_fields
            updates = {}

            # Restore name if it differs
            if "name" in custom_fields and custom_fields["name"] != current_name:
                updates["name"] = custom_fields["name"]

            # Restore phone if it differs
            if "phone" in custom_fields and custom_fields["phone"] != current_phone:
                updates["phone"] = custom_fields["phone"]

            # Restore address fields
            if (
                "address_street" in custom_fields
                and custom_fields["address_street"] != current_street
            ):
                updates["address_street"] = custom_fields["address_street"]

            if (
                "address_city" in custom_fields
                and custom_fields["address_city"] != current_city
            ):
                updates["address_city"] = custom_fields["address_city"]

            if (
                "address_zip" in custom_fields
                and custom_fields["address_zip"] != current_zip
            ):
                updates["address_zip"] = custom_fields["address_zip"]

            if updates:
                # Build update query
                set_clauses = ", ".join([f"{k} = :{k}" for k in updates])
                updates["id"] = customer_id
                conn.execute(
                    text(f"UPDATE customers SET {set_clauses} WHERE id = :id"), updates
                )
                rolled_back += 1
                print(
                    f"  [OK] Customer {customer_id} ({email}): restored {list(updates.keys())}"
                )

        conn.commit()

    print("=" * 60)
    print(f"Rollback completed: {rolled_back} customers restored")


def verify_migration():
    """Verify migration by checking custom_fields content."""
    engine = get_engine()

    print("\nVerifying migration...")
    print("=" * 60)

    with engine.connect() as conn:
        result = conn.execute(
            text(
                """
                SELECT id, email, custom_fields
                FROM customers
                WHERE custom_fields IS NOT NULL AND custom_fields != '{}'
                ORDER BY id
                LIMIT 10
                """
            )
        )

        for row in result:
            customer_id = row[0]
            email = row[1]
            custom_fields_raw = row[2]

            try:
                custom_fields = json.loads(custom_fields_raw)
                has_name = "name" in custom_fields
                has_phone = "phone" in custom_fields
                has_address = "address_street" in custom_fields

                print(f"  Customer {customer_id} ({email}):")
                print(f"    name: {'OK' if has_name else 'MISSING'}")
                print(f"    phone: {'OK' if has_phone else 'MISSING'}")
                print(f"    address: {'OK' if has_address else 'MISSING'}")
            except json.JSONDecodeError:
                print(f"  Customer {customer_id} ({email}): INVALID JSON")

    print("=" * 60)


if __name__ == "__main__":
    if "--rollback" in sys.argv:
        run_rollback()
    elif "--verify" in sys.argv:
        verify_migration()
    else:
        run_migration()
        verify_migration()
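The five near-identical per-field blocks in run_migration() can be expressed as one loop over a column whitelist. A sketch of the equivalent logic (simplified: it truncates every logged value uniformly, where the original truncates only the street and city values):

# Hypothetical refactor sketch - same behavior as the per-field blocks above
LEGACY_FIELDS = ["name", "phone", "address_street", "address_city", "address_zip"]


def consolidate_row(values: list, custom_fields: dict) -> list[str]:
    """Merge legacy column values into custom_fields; return a log of changes."""
    changes = []
    for key, value in zip(LEGACY_FIELDS, values):
        # Only copy non-empty values that are not already present
        if value and key not in custom_fields:
            custom_fields[key] = value
            changes.append(f"{key}={str(value)[:30]}")
    return changes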
196
customer_portal/migrations/005_text_to_jsonb.py
Executable file
@@ -0,0 +1,196 @@
#!/usr/bin/env python3
"""Migration 005: Convert custom_fields from TEXT to JSONB.

Sprint 12: PostgreSQL JSONB enables:
- Native JSON operators (@>, ?, ?|, ?&, etc.)
- GIN index for fast searching
- Automatic JSON validation

Usage:
    python -m customer_portal.migrations.005_text_to_jsonb

The migration:
1. Converts existing TEXT JSON to JSONB
2. Creates GIN index for searching
"""

import os
import sys

from sqlalchemy import create_engine, text

# Add parent directory to path
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(__file__))))


def get_database_url() -> str:
    """Get database URL from environment or default."""
    return os.environ.get(
        "DATABASE_URL", "postgresql://portal:portal@localhost:5433/customer_portal"
    )


def run_migration():
    """Convert custom_fields from TEXT to JSONB."""
    engine = create_engine(get_database_url())

    with engine.connect() as conn:
        # Check current column type
        result = conn.execute(
            text(
                """
                SELECT data_type
                FROM information_schema.columns
                WHERE table_name = 'customers'
                  AND column_name = 'custom_fields'
                """
            )
        )
        row = result.fetchone()

        if not row:
            print("Column 'custom_fields' not found in 'customers' table")
            return

        current_type = row[0]
        print(f"Current column type: {current_type}")

        if current_type == "jsonb":
            print("Column is already JSONB - skipping conversion")
        else:
            print("Converting TEXT to JSONB...")

            # Step 1: Drop the default first (can't cast default automatically)
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ALTER COLUMN custom_fields DROP DEFAULT
                    """
                )
            )

            # Step 2: Update NULL/empty values to valid JSON
            conn.execute(
                text(
                    """
                    UPDATE customers
                    SET custom_fields = '{}'
                    WHERE custom_fields IS NULL OR custom_fields = ''
                    """
                )
            )

            # Step 3: Convert TEXT to JSONB
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ALTER COLUMN custom_fields
                    TYPE JSONB
                    USING custom_fields::jsonb
                    """
                )
            )

            # Step 4: Set new default and NOT NULL
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ALTER COLUMN custom_fields
                    SET DEFAULT '{}'::jsonb
                    """
                )
            )
            conn.execute(
                text(
                    """
                    ALTER TABLE customers
                    ALTER COLUMN custom_fields
                    SET NOT NULL
                    """
                )
            )

            print("Column converted to JSONB")

        # Check if GIN index exists
        result = conn.execute(
            text(
                """
                SELECT indexname
                FROM pg_indexes
                WHERE tablename = 'customers'
                  AND indexname = 'ix_customers_custom_fields'
                """
            )
        )

        if result.fetchone():
            print("GIN index already exists")
        else:
            print("Creating GIN index...")
            conn.execute(
                text(
                    """
                    CREATE INDEX ix_customers_custom_fields
                    ON customers
                    USING GIN (custom_fields)
                    """
                )
            )
            print("GIN index created")

        conn.commit()
        print("\nMigration completed successfully!")

        # Show example queries
        print("\n--- Example JSONB Queries ---")
        print("-- Find customers from Wien:")
        print("SELECT * FROM customers WHERE custom_fields->>'ort' = 'Wien';")
        print("")
        print("-- Find customers with phone number:")
        print("SELECT * FROM customers WHERE custom_fields ? 'telefon';")
        print("")
        print("-- Find customers with specific field value (uses GIN index):")
        print('SELECT * FROM customers WHERE custom_fields @> \'{"ort": "Wien"}\';')


def rollback():
    """Rollback: Convert JSONB back to TEXT."""
    engine = create_engine(get_database_url())

    with engine.connect() as conn:
        print("Rolling back: Converting JSONB to TEXT...")

        # Drop GIN index first
        conn.execute(
            text(
                """
                DROP INDEX IF EXISTS ix_customers_custom_fields
                """
            )
        )

        # Convert JSONB to TEXT
        conn.execute(
            text(
                """
                ALTER TABLE customers
                ALTER COLUMN custom_fields
                TYPE TEXT
                USING custom_fields::TEXT
                """
            )
        )

        conn.commit()
        print("Rollback completed")


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "--rollback":
        rollback()
    else:
        run_migration()
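The same example queries can be issued from Python. A sketch using SQLAlchemy (the containment parameter is cast to jsonb server-side; the field keys 'ort' and 'telefon' are the ones the migration's own examples use):

import os

from sqlalchemy import create_engine, text

engine = create_engine(
    os.environ.get("DATABASE_URL", "postgresql://portal:portal@localhost:5433/customer_portal")
)
with engine.connect() as conn:
    # Containment query - can use the GIN index
    wien = conn.execute(
        text("SELECT id, email FROM customers WHERE custom_fields @> CAST(:doc AS jsonb)"),
        {"doc": '{"ort": "Wien"}'},
    ).fetchall()
    # Key-existence query with the ? operator
    with_phone = conn.execute(
        text("SELECT id, email FROM customers WHERE custom_fields ? 'telefon'")
    ).fetchall()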
208
customer_portal/migrations/006_drop_legacy_columns.py
Executable file
@@ -0,0 +1,208 @@
#!/usr/bin/env python3
"""Migration 006: Drop legacy customer columns.

Sprint 12 Final Cleanup:
All customer data is now in custom_fields (JSONB).
Remove the deprecated fixed columns.

Usage:
    python -m customer_portal.migrations.006_drop_legacy_columns

IMPORTANT: Run migrations 004 and 005 first!
- 004: Consolidate data into custom_fields
- 005: Convert TEXT to JSONB
- 006: Drop legacy columns (this migration)
"""

import os
import sys

from sqlalchemy import create_engine, text

# Add parent directory to path
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(__file__))))


# Columns to remove
LEGACY_COLUMNS = ["name", "phone", "address_street", "address_city", "address_zip"]


def get_database_url() -> str:
    """Get database URL from environment or default."""
    return os.environ.get(
        "DATABASE_URL", "postgresql://portal:portal@localhost:5433/customer_portal"
    )


def verify_data_migrated(conn) -> bool:
    """Verify all legacy data is in custom_fields before dropping columns."""
    print("Verifying data migration...")

    # Check for customers with legacy data not in custom_fields
    result = conn.execute(
        text(
            """
            SELECT COUNT(*) as cnt
            FROM customers
            WHERE (
                (name IS NOT NULL AND name != '' AND
                 (custom_fields->>'name' IS NULL OR custom_fields->>'name' = ''))
                OR
                (phone IS NOT NULL AND phone != '' AND
                 (custom_fields->>'phone' IS NULL OR custom_fields->>'phone' = ''))
                OR
                (address_street IS NOT NULL AND address_street != '' AND
                 (custom_fields->>'address_street' IS NULL OR custom_fields->>'address_street' = ''))
                OR
                (address_city IS NOT NULL AND address_city != '' AND
                 (custom_fields->>'address_city' IS NULL OR custom_fields->>'address_city' = ''))
                OR
                (address_zip IS NOT NULL AND address_zip != '' AND
                 (custom_fields->>'address_zip' IS NULL OR custom_fields->>'address_zip' = ''))
            )
            """
        )
    )
    count = result.fetchone()[0]

    if count > 0:
        print(
            f"  WARNING: {count} customers have data in legacy columns not in custom_fields!"
        )
        return False

    print("  All legacy data is in custom_fields")
    return True


def migrate_remaining_data(conn) -> int:
    """Migrate any remaining data from legacy columns to custom_fields."""
    print("Migrating any remaining legacy data...")

    result = conn.execute(
        text(
            """
            UPDATE customers
            SET custom_fields = custom_fields ||
                jsonb_strip_nulls(jsonb_build_object(
                    'name', CASE WHEN name != '' AND (custom_fields->>'name' IS NULL OR custom_fields->>'name' = '') THEN name ELSE NULL END,
                    'phone', CASE WHEN phone != '' AND (custom_fields->>'phone' IS NULL OR custom_fields->>'phone' = '') THEN phone ELSE NULL END,
                    'address_street', CASE WHEN address_street != '' AND (custom_fields->>'address_street' IS NULL OR custom_fields->>'address_street' = '') THEN address_street ELSE NULL END,
                    'address_city', CASE WHEN address_city != '' AND (custom_fields->>'address_city' IS NULL OR custom_fields->>'address_city' = '') THEN address_city ELSE NULL END,
                    'address_zip', CASE WHEN address_zip != '' AND (custom_fields->>'address_zip' IS NULL OR custom_fields->>'address_zip' = '') THEN address_zip ELSE NULL END
                ))
            WHERE name IS NOT NULL OR phone IS NOT NULL OR address_street IS NOT NULL
                OR address_city IS NOT NULL OR address_zip IS NOT NULL
            """
        )
    )

    count = result.rowcount
    print(f"  Updated {count} customers")
    return count


def check_columns_exist(conn) -> list[str]:
    """Check which legacy columns still exist."""
    result = conn.execute(
        text(
            """
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name = 'customers'
              AND column_name = ANY(:columns)
            """
        ),
        {"columns": LEGACY_COLUMNS},
    )

    return [row[0] for row in result.fetchall()]


def run_migration():
    """Drop legacy columns after verifying data migration."""
    engine = create_engine(get_database_url())

    with engine.connect() as conn:
        # Check which columns exist
        existing = check_columns_exist(conn)
        if not existing:
            print("No legacy columns found - nothing to do")
            return

        print(f"Found legacy columns: {existing}")

        # First, migrate any remaining data
        migrate_remaining_data(conn)
        conn.commit()

        # Verify all data is migrated
        if not verify_data_migrated(conn):
            print("\nERROR: Data verification failed!")
            print("Run migration 004 first to consolidate data.")
            return

        # Drop columns
        print("\nDropping legacy columns...")
        for col in existing:
            print(f"  Dropping: {col}")
            conn.execute(text(f"ALTER TABLE customers DROP COLUMN IF EXISTS {col}"))

        conn.commit()

        print("\nMigration completed successfully!")
        print("\nCustomer table now has only:")
        print("  - id (PK)")
        print("  - email (unique identifier)")
        print("  - custom_fields (JSONB - all customer data)")
        print("  - wp_user_id")
        print("  - email_* preferences")
        print("  - is_admin")
        print("  - created_at, updated_at, last_login_at")


def rollback():
    """Rollback: Re-add legacy columns and repopulate them from custom_fields."""
    engine = create_engine(get_database_url())

    with engine.connect() as conn:
        print("Rolling back: Re-adding legacy columns...")

        conn.execute(
            text(
                """
                ALTER TABLE customers
                ADD COLUMN IF NOT EXISTS name VARCHAR(255) NOT NULL DEFAULT '',
                ADD COLUMN IF NOT EXISTS phone VARCHAR(50),
                ADD COLUMN IF NOT EXISTS address_street VARCHAR(255),
                ADD COLUMN IF NOT EXISTS address_city VARCHAR(255),
                ADD COLUMN IF NOT EXISTS address_zip VARCHAR(20)
                """
            )
        )

        # Populate from custom_fields
        print("Populating columns from custom_fields...")
        conn.execute(
            text(
                """
                UPDATE customers
                SET
                    name = COALESCE(custom_fields->>'name', ''),
                    phone = custom_fields->>'phone',
                    address_street = custom_fields->>'address_street',
                    address_city = custom_fields->>'address_city',
                    address_zip = custom_fields->>'address_zip'
                """
            )
        )

        conn.commit()
        print("Rollback completed")


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "--rollback":
        rollback()
    else:
        run_migration()
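Since 004, 005 and 006 must run in exactly this order, a wrapper that stops at the first failing step avoids leaving the schema half-migrated. A sketch of such a runner (hypothetical, not part of this commit; each module is executed the same way the docstrings describe):

import subprocess
import sys

MIGRATIONS = [
    "customer_portal.migrations.004_consolidate_customer_fields",
    "customer_portal.migrations.005_text_to_jsonb",
    "customer_portal.migrations.006_drop_legacy_columns",
]

for module in MIGRATIONS:
    print(f"== {module} ==")
    result = subprocess.run([sys.executable, "-m", module])
    if result.returncode != 0:
        # Stop before later migrations run against a bad state
        sys.exit(result.returncode)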
7
customer_portal/migrations/__init__.py
Executable file
@@ -0,0 +1,7 @@
"""Database migrations for Customer Portal.

Sprint 6.6: Added custom_fields migration for WordPress sync.

Run migrations:
    python -m customer_portal.migrations.001_add_custom_fields
"""
BIN
customer_portal/migrations/__pycache__/001_add_custom_fields.cpython-311.pyc
Executable file
Binary file not shown.
BIN
customer_portal/migrations/__pycache__/001_add_custom_fields.cpython-312.pyc
Executable file
Binary file not shown.
BIN
customer_portal/migrations/__pycache__/003_add_email_preferences.cpython-312.pyc
Executable file
Binary file not shown.
BIN
customer_portal/migrations/__pycache__/005_text_to_jsonb.cpython-311.pyc
Executable file
Binary file not shown.
BIN
customer_portal/migrations/__pycache__/005_text_to_jsonb.cpython-312.pyc
Executable file
Binary file not shown.
BIN
customer_portal/migrations/__pycache__/006_drop_legacy_columns.cpython-311.pyc
Executable file
Binary file not shown.
BIN
customer_portal/migrations/__pycache__/006_drop_legacy_columns.cpython-312.pyc
Executable file
Binary file not shown.
BIN
customer_portal/migrations/__pycache__/__init__.cpython-312.pyc
Executable file
Binary file not shown.