#!/usr/bin/env python3
"""
Migration Verification Script for Canvas Metadata Table

This script verifies that the canvas_metadata migration was successful by:

1. Checking that the canvas_metadata table exists
2. Verifying all required columns exist with correct types
3. Checking that all indexes were created
4. Validating data migration from general_canvases
5. Validating data migration from asset canvases
6. Validating data migration from storyboard canvases
7. Checking data integrity and consistency
"""
import json
import os
import sys

from sqlalchemy import create_engine, inspect, text
from sqlalchemy.engine import Engine

# Make the repository root importable so `backend.*` resolves when this
# script is executed directly (it lives two levels below the root).
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))

from backend.src.config.settings import DB_PATH
class MigrationVerifier:
    """Runs schema and data checks for the canvas_metadata migration.

    Every individual check is recorded through :meth:`check` (or
    :meth:`warn` for non-fatal findings) so that the summary printed by
    :meth:`run_all_checks` accurately reflects all checks performed.
    """

    def __init__(self, db_path: str):
        self.db_path = db_path
        # SQLite database accessed through SQLAlchemy; the inspector is
        # used for schema introspection (tables, columns, indexes, FKs).
        self.engine = create_engine(f"sqlite:///{db_path}")
        self.inspector = inspect(self.engine)
        self.errors: list = []
        self.warnings: list = []
        self.success_count = 0
        self.total_checks = 0

    def check(self, condition: bool, success_msg: str, error_msg: str) -> None:
        """Record a single check result, print its outcome, and tally it."""
        self.total_checks += 1
        if condition:
            self.success_count += 1
            print(f"✅ {success_msg}")
        else:
            self.errors.append(error_msg)
            print(f"❌ {error_msg}")

    def warn(self, message: str) -> None:
        """Record a non-fatal warning and print it (not counted as a check)."""
        self.warnings.append(message)
        print(f"⚠️ {message}")

    def verify_table_exists(self) -> bool:
        """Verify that the canvas_metadata table exists."""
        print("\n=== Checking Table Existence ===")
        exists = 'canvas_metadata' in self.inspector.get_table_names()
        self.check(
            exists,
            "canvas_metadata table exists",
            "canvas_metadata table does not exist"
        )
        return exists

    def verify_columns(self) -> bool:
        """Verify all required columns exist with the expected types.

        Type mismatches are reported as warnings (SQLite's type affinity
        makes exact matches unreliable); missing columns are errors.
        """
        print("\n=== Checking Columns ===")

        required_columns = {
            'id': 'VARCHAR',
            'project_id': 'VARCHAR',
            'canvas_type': 'VARCHAR',
            'related_entity_type': 'VARCHAR',
            'related_entity_id': 'VARCHAR',
            'name': 'VARCHAR',
            'description': 'VARCHAR',
            'order_index': 'INTEGER',
            'is_pinned': 'BOOLEAN',
            'tags': 'JSON',
            'node_count': 'INTEGER',
            'last_accessed_at': 'FLOAT',
            'access_count': 'INTEGER',
            'created_at': 'FLOAT',
            'updated_at': 'FLOAT',
            'deleted_at': 'FLOAT',
            'legacy_id': 'VARCHAR'
        }

        columns = self.inspector.get_columns('canvas_metadata')
        column_dict = {col['name']: col for col in columns}

        all_columns_exist = True
        for col_name, expected_type in required_columns.items():
            if col_name not in column_dict:
                all_columns_exist = False
                # Route through check() so the summary counters stay accurate
                # (previously missing columns were not tallied as checks).
                self.check(False, "", f"Column '{col_name}' is missing")
                continue

            col_type = str(column_dict[col_name]['type']).upper()
            if expected_type == 'JSON' and 'TEXT' in col_type:
                # SQLite stores JSON columns as TEXT, so accept that.
                self.check(
                    True,
                    f"Column '{col_name}' exists with type {col_type} (JSON stored as TEXT)",
                    ""
                )
            elif expected_type in col_type or col_type in expected_type:
                self.check(
                    True,
                    f"Column '{col_name}' exists with type {col_type}",
                    ""
                )
            else:
                self.warn(f"Column '{col_name}' exists but type is {col_type}, expected {expected_type}")

        return all_columns_exist

    def verify_indexes(self) -> bool:
        """Verify all required indexes exist on canvas_metadata."""
        print("\n=== Checking Indexes ===")

        required_indexes = [
            'ix_canvas_metadata_project_id',
            'ix_canvas_metadata_canvas_type',
            'ix_canvas_metadata_related_entity_id',
            'ix_canvas_metadata_legacy_id',
            'ix_canvas_metadata_project_type',
            'ix_canvas_metadata_type_entity'
        ]

        index_names = {idx['name'] for idx in self.inspector.get_indexes('canvas_metadata')}

        all_indexes_exist = True
        for idx_name in required_indexes:
            present = idx_name in index_names
            # Tally each index check so the summary counters stay accurate.
            self.check(
                present,
                f"Index '{idx_name}' exists",
                f"Index '{idx_name}' is missing"
            )
            if not present:
                all_indexes_exist = False

        return all_indexes_exist

    def verify_foreign_keys(self) -> bool:
        """Verify the foreign key from canvas_metadata.project_id to projects."""
        print("\n=== Checking Foreign Keys ===")

        fks = self.inspector.get_foreign_keys('canvas_metadata')

        has_project_fk = any(
            fk['referred_table'] == 'projects' and 'project_id' in fk['constrained_columns']
            for fk in fks
        )

        self.check(
            has_project_fk,
            "Foreign key to projects table exists",
            "Foreign key to projects table is missing"
        )

        return has_project_fk

    def verify_data_migration(self) -> bool:
        """Report per-type record counts and legacy_id mapping coverage.

        Returns True in all cases; an empty table is only a warning since
        the source database may legitimately have had no canvases.
        """
        print("\n=== Checking Data Migration ===")

        with self.engine.connect() as conn:
            result = conn.execute(text("SELECT COUNT(*) FROM canvas_metadata"))
            count = result.scalar()

            if count == 0:
                self.warn("No canvas metadata records found (this is OK if database is empty)")
                return True

            print(f"✅ Found {count} canvas metadata records")

            # Break down the records by canvas type for visibility.
            for canvas_type, label in (
                ('general', 'General'),
                ('asset', 'Asset'),
                ('storyboard', 'Storyboard'),
            ):
                result = conn.execute(text(
                    "SELECT COUNT(*) FROM canvas_metadata WHERE canvas_type = :ct"
                ), {"ct": canvas_type})
                print(f"  - {label} canvases: {result.scalar()}")

            # legacy_id links migrated rows back to their pre-migration ids.
            result = conn.execute(text(
                "SELECT COUNT(*) FROM canvas_metadata WHERE legacy_id IS NOT NULL"
            ))
            legacy_count = result.scalar()
            if legacy_count > 0:
                print(f"✅ Found {legacy_count} canvases with legacy_id mapping")

            return True

    def verify_data_integrity(self) -> bool:
        """Verify integrity invariants on the migrated metadata rows."""
        print("\n=== Checking Data Integrity ===")

        with self.engine.connect() as conn:
            if 'canvases' not in self.inspector.get_table_names():
                # Don't record a "passed" check for something we skipped.
                self.warn("canvases table does not exist yet - skipping canvas content check")
                orphaned_metadata = 0
            else:
                # Every metadata row must have matching canvas content.
                result = conn.execute(text("""
                    SELECT COUNT(*)
                    FROM canvas_metadata cm
                    LEFT JOIN canvases c ON cm.id = c.id
                    WHERE c.id IS NULL
                """))
                orphaned_metadata = result.scalar()

                self.check(
                    orphaned_metadata == 0,
                    "All canvas metadata records have corresponding canvas content",
                    f"Found {orphaned_metadata} canvas metadata records without canvas content"
                )

            # Asset/storyboard canvases must point at their owning entity.
            result = conn.execute(text("""
                SELECT COUNT(*)
                FROM canvas_metadata
                WHERE canvas_type IN ('asset', 'storyboard')
                AND related_entity_id IS NULL
            """))
            missing_entity_id = result.scalar()

            self.check(
                missing_entity_id == 0,
                "All asset/storyboard canvases have related_entity_id",
                f"Found {missing_entity_id} asset/storyboard canvases without related_entity_id"
            )

            # General canvases must NOT carry a related_entity_id.
            result = conn.execute(text("""
                SELECT COUNT(*)
                FROM canvas_metadata
                WHERE canvas_type = 'general'
                AND related_entity_id IS NOT NULL
            """))
            invalid_general = result.scalar()

            self.check(
                invalid_general == 0,
                "General canvases don't have related_entity_id",
                f"Found {invalid_general} general canvases with related_entity_id"
            )

            return orphaned_metadata == 0 and missing_entity_id == 0 and invalid_general == 0

    def verify_project_relationship(self) -> bool:
        """Verify every metadata row references an existing project."""
        print("\n=== Checking Project Relationship ===")

        with self.engine.connect() as conn:
            result = conn.execute(text("""
                SELECT COUNT(*)
                FROM canvas_metadata cm
                LEFT JOIN projects p ON cm.project_id = p.id
                WHERE p.id IS NULL
            """))
            orphaned_canvases = result.scalar()

            self.check(
                orphaned_canvases == 0,
                "All canvas metadata records reference valid projects",
                f"Found {orphaned_canvases} canvas metadata records with invalid project_id"
            )

            return orphaned_canvases == 0

    def run_all_checks(self) -> bool:
        """Run every verification check and print a summary.

        Returns:
            True if no errors were recorded, False otherwise.
        """
        print("=" * 60)
        print("Canvas Metadata Migration Verification")
        print("=" * 60)
        print(f"Database: {self.db_path}")

        if not os.path.exists(self.db_path):
            print(f"\n❌ Database file does not exist: {self.db_path}")
            return False

        # The remaining checks are meaningless without the table itself.
        if not self.verify_table_exists():
            print("\n❌ Cannot continue verification - table does not exist")
            return False

        self.verify_columns()
        self.verify_indexes()
        self.verify_foreign_keys()
        self.verify_data_migration()
        self.verify_data_integrity()
        self.verify_project_relationship()

        print("\n" + "=" * 60)
        print("Verification Summary")
        print("=" * 60)
        print(f"Total checks: {self.total_checks}")
        print(f"Passed: {self.success_count}")
        print(f"Failed: {len(self.errors)}")
        print(f"Warnings: {len(self.warnings)}")

        if self.errors:
            print("\n❌ Errors:")
            for error in self.errors:
                print(f"  - {error}")

        if self.warnings:
            print("\n⚠️ Warnings:")
            for warning in self.warnings:
                print(f"  - {warning}")

        if len(self.errors) == 0:
            print("\n✅ All checks passed! Migration is successful.")
            return True
        else:
            print("\n❌ Migration verification failed. Please review the errors above.")
            return False
def main() -> None:
    """Entry point: run all verification checks against the configured DB.

    Exits with status 0 when every check passes, 1 otherwise.
    """
    verifier = MigrationVerifier(DB_PATH)
    sys.exit(0 if verifier.run_all_checks() else 1)


if __name__ == "__main__":
    main()