This guide helps you migrate your existing Aushadham data from SQLite/PostgreSQL to Supabase.
- Existing Aushadham database with data
- Supabase project set up (see SUPABASE_SETUP.md)
- Python environment with required packages
There are two approaches to migrate your data:
Best for databases with less than 1000 records.
# export_data.py
import sqlite3
import json
def export_database(db_path='aushadham.db', output_path='database_export.json'):
    """Export all rows from the users, saved_questionnaires and
    user_feedback tables of a SQLite database into one JSON file.

    Args:
        db_path: Path to the source SQLite database file.
        output_path: Path of the JSON file to write (new parameter;
            defaults to the previously hard-coded 'database_export.json').

    Raises:
        sqlite3.OperationalError: If one of the expected tables is missing.
    """
    conn = sqlite3.connect(db_path)
    try:
        # sqlite3.Row lets each row convert to a plain dict keyed by column name.
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        data = {}

        cursor.execute("SELECT * FROM users")
        data['users'] = [dict(row) for row in cursor.fetchall()]

        cursor.execute("SELECT * FROM saved_questionnaires")
        data['questionnaires'] = [dict(row) for row in cursor.fetchall()]

        cursor.execute("SELECT * FROM user_feedback")
        data['feedback'] = [dict(row) for row in cursor.fetchall()]
    finally:
        # Close even if a query fails, so the database file is not left locked.
        conn.close()

    # default=str stringifies values json can't handle natively (e.g. datetimes).
    with open(output_path, 'w') as f:
        json.dump(data, f, indent=2, default=str)

    print(f"Exported {len(data['users'])} users")
    print(f"Exported {len(data['questionnaires'])} questionnaires")
    print(f"Exported {len(data['feedback'])} feedback entries")
if __name__ == '__main__':
    export_database()

Run the export:
python export_data.py

# import_data.py
import json
import os
from dotenv import load_dotenv
from supabase_service import SupabaseService
load_dotenv()
def import_database(json_file='database_export.json'):
    """Import a JSON export (produced by export_data.py) into Supabase.

    Tables load in dependency order -- users, then questionnaires, then
    feedback -- so foreign-key references resolve. Original primary keys
    are preserved; reset the Postgres sequences afterwards (see the
    troubleshooting section of this guide).

    Args:
        json_file: Path to the JSON export file.
    """
    # Initialize Supabase from environment configuration.
    service = SupabaseService(
        os.getenv('SUPABASE_URL'),
        os.getenv('SUPABASE_KEY')
    )

    with open(json_file, 'r') as f:
        data = json.load(f)

    print("Starting import...")

    # Users first: the other two tables reference users.id.
    _import_rows(service, 'users', data.get('users', []),
                 _user_row, 'user', lambda u: u['username'])
    _import_rows(service, 'saved_questionnaires', data.get('questionnaires', []),
                 _questionnaire_row, 'questionnaire', lambda q: q['session_id'])
    _import_rows(service, 'user_feedback', data.get('feedback', []),
                 _feedback_row, 'feedback', lambda fb: fb['id'])

    print("\nImport complete!")


def _import_rows(service, table, records, to_row, noun, label):
    """Insert each record into ``table``, logging per-row success/failure."""
    for record in records:
        name = label(record)
        try:
            service.client.table(table).insert(to_row(record)).execute()
            print(f"✓ Imported {noun}: {name}")
        except Exception as e:
            # Best-effort import: report the failure and continue.
            print(f"✗ Failed to import {noun} {name}: {e}")


def _as_json(value):
    """Decode a JSON string (SQLite stores JSON columns as text);
    pass already-decoded values through unchanged."""
    return json.loads(value) if isinstance(value, str) else value


def _user_row(user):
    """Map an exported user record onto the Supabase users schema."""
    return {
        'id': user['id'],  # preserved so child rows keep valid foreign keys
        'username': user['username'],
        'email': user['email'],
        'password_hash': user['password_hash'],
        'full_name': user.get('full_name'),
        'phone': user.get('phone'),
        'created_at': user['created_at'],
    }


def _questionnaire_row(quest):
    """Map an exported questionnaire record onto saved_questionnaires."""
    return {
        'id': quest['id'],
        'user_id': quest['user_id'],
        'session_id': quest['session_id'],
        'symptom': quest['symptom'],
        'initial_description': quest.get('initial_description'),
        'answers': _as_json(quest['answers']),
        'report': _as_json(quest['report']),
        'severity': quest.get('severity'),
        'created_at': quest['created_at'],
    }


def _feedback_row(fb):
    """Map an exported feedback record onto the user_feedback schema."""
    return {
        'id': fb['id'],
        'user_id': fb['user_id'],
        'questionnaire_id': fb.get('questionnaire_id'),
        'rating': fb.get('rating'),
        'comment': fb.get('comment'),
        'feedback_type': fb.get('feedback_type'),
        'created_at': fb['created_at'],
    }
if __name__ == '__main__':
    import_database()

Run the import:
python import_data.py

Best for databases with 1000+ records.
pip install sqlalchemy-migrate

# migrate_direct.py
import os
from dotenv import load_dotenv
from sqlalchemy import create_engine, MetaData
from supabase_service import SupabaseService
load_dotenv()
def migrate_table(source_engine, dest_service, table_name, id_col='id'):
    """Copy every row of ``table_name`` from the source database into the
    same-named Supabase table, one insert per row.

    Args:
        source_engine: SQLAlchemy engine connected to the source database.
        dest_service: SupabaseService wrapping the destination client.
        table_name: Table to copy. Pass only trusted, hard-coded names --
            the value is interpolated directly into SQL.
        id_col: Column used to identify a row in failure messages.

    Returns:
        Tuple ``(success, failed)`` row counts (new; previously None).
    """
    print(f"\nMigrating {table_name}...")

    # Read everything up front so the source connection is released before
    # the row-by-row inserts start.
    # NOTE(review): passing a raw SQL string to Connection.execute works on
    # SQLAlchemy 1.x only; 2.x requires sqlalchemy.text() -- confirm the
    # pinned version before running.
    with source_engine.connect() as conn:
        result = conn.execute(f"SELECT * FROM {table_name}")
        rows = result.fetchall()
        columns = result.keys()

    success = 0
    failed = 0
    for row in rows:
        # Build the dict outside the try block so the except handler can
        # always report the row id (previously a NameError risk if the
        # dict construction itself raised).
        data = dict(zip(columns, row))
        try:
            dest_service.client.table(table_name).insert(data).execute()
            success += 1
        except Exception as e:
            print(f"Failed to migrate row {data.get(id_col)}: {e}")
            failed += 1

    print(f"✓ Migrated {success} rows, {failed} failed")
    return success, failed
def main():
    """Run the direct migration: source database -> Supabase, table by table."""
    # Source: DATABASE_URL if set, otherwise the local SQLite file.
    source_engine = create_engine(
        os.getenv('DATABASE_URL', 'sqlite:///aushadham.db')
    )

    # Destination: Supabase, configured from the environment.
    dest_service = SupabaseService(
        os.getenv('SUPABASE_URL'),
        os.getenv('SUPABASE_KEY')
    )

    # Order matters: parent tables before the tables that reference them.
    for table in ('users', 'saved_questionnaires', 'user_feedback'):
        migrate_table(source_engine, dest_service, table)

    print("\n✓ Migration complete!")
if __name__ == '__main__':
    main()

Run the migration:
python migrate_direct.py

-
Verify Data Integrity
- Check row counts match
- Verify foreign key relationships
- Test authentication with existing users
-
Test Application
# Set Supabase environment
export USE_SUPABASE=true

# Run tests
python test_api.py
-
Backup Old Database
cp aushadham.db aushadham.db.backup
-
Update Environment

Update your .env file:

USE_SUPABASE=true
SUPABASE_URL=your-url
SUPABASE_KEY=your-key
-
Monitor Performance
- Check Supabase dashboard for query performance
- Monitor API response times
- Watch for any errors in logs
If you need to rollback to SQLite:
-
Set environment:
USE_SUPABASE=false
DATABASE_URL=sqlite:///aushadham.db
-
Restore backup if needed:
cp aushadham.db.backup aushadham.db
-
Restart application
Problem: Auto-increment IDs don't match after import
Solution: Reset PostgreSQL sequences in Supabase SQL Editor:
SELECT setval('users_id_seq', (SELECT MAX(id) FROM users));
SELECT setval('saved_questionnaires_id_seq', (SELECT MAX(id) FROM saved_questionnaires));
SELECT setval('user_feedback_id_seq', (SELECT MAX(id) FROM user_feedback));

Problem: JSON fields not importing correctly
Solution: Ensure JSON is properly formatted:
import json
# Convert string to JSON
if isinstance(data['answers'], str):
    data['answers'] = json.loads(data['answers'])

Problem: Child records reference non-existent parent records
Solution:
- Import users first
- Import questionnaires second
- Import feedback last
Problem: Records with same ID already exist
Solution: Clear Supabase tables before import:
TRUNCATE users, saved_questionnaires, user_feedback CASCADE;

- Batch Inserts: For large datasets, insert in batches of 100-1000 records
- Disable Triggers: Temporarily disable triggers during migration if needed
- Parallel Processing: Use Python's multiprocessing for very large datasets
- Monitor Progress: Add progress bars using tqdm:

from tqdm import tqdm

for row in tqdm(rows):
    # import row
Run comprehensive tests after migration:
# Test with Supabase backend
export USE_SUPABASE=true
python test_api.py
# Run integration tests
python test_supabase_integration.py

- Check Supabase logs: Dashboard → Logs → Postgres Logs
- Review application logs for errors
- Test specific endpoints with curl
- Consult SUPABASE_SETUP.md for troubleshooting
After successful migration:
- Set up regular backups in Supabase
- Configure monitoring and alerts
- Review and optimize database indexes
- Consider enabling Point-in-Time Recovery (PITR)
- Update deployment configurations
Congratulations! Your data is now in Supabase. 🎉