Initial commit: Pixel AI comic/video creation platform
- FastAPI backend with SQLModel, Alembic migrations, AgentScope agents - Next.js 15 frontend with React 19, Tailwind, Zustand, React Flow - Multi-provider AI system (DashScope, Kling, MiniMax, Volcengine, OpenAI, etc.) - All HTTP clients migrated from sync requests to async httpx - Admin-managed API keys via environment variables - SSRF vulnerability fixed in ensure_url()
This commit is contained in:
1
backend/alembic/README
Normal file
1
backend/alembic/README
Normal file
@@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
||||
94
backend/alembic/env.py
Normal file
94
backend/alembic/env.py
Normal file
@@ -0,0 +1,94 @@
|
||||
import sys
|
||||
import os
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
from sqlmodel import SQLModel
|
||||
|
||||
from alembic import context
|
||||
|
||||
# Make the repository root importable so the `backend.*` packages resolve
# when Alembic executes this script from backend/alembic/.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))

# Import every model module so their tables are registered on
# SQLModel.metadata — required for `alembic revision --autogenerate`
# to see the full schema.
from backend.src.models.entities import *
from backend.src.models.session import *
from backend.src.config.settings import DB_PATH, DATABASE_URL

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Override sqlalchemy.url from alembic.ini: prefer an explicit
# DATABASE_URL from settings, else fall back to the local SQLite file
# at DB_PATH.
if DATABASE_URL:
    config.set_main_option("sqlalchemy.url", DATABASE_URL)
else:
    config.set_main_option("sqlalchemy.url", f"sqlite:///{DB_PATH}")

# Metadata object autogenerate diffs against; SQLModel collects every
# table defined by the model imports above.
target_metadata = SQLModel.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just a database URL instead of
    an Engine, so no DBAPI needs to be installed. Calls to
    context.execute() emit the generated SQL to the script output.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] config section and applies the
    migrations over a live database connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Dispatch on how Alembic was invoked: `--sql` runs offline, otherwise
# we migrate against a live connection.
_run = run_migrations_offline if context.is_offline_mode() else run_migrations_online
_run()
|
||||
28
backend/alembic/script.py.mako
Normal file
28
backend/alembic/script.py.mako
Normal file
@@ -0,0 +1,28 @@
|
||||
## Mako template Alembic uses to generate new migration scripts.
## The ${...} placeholders (message, up_revision, ...) are filled in by
## `alembic revision`; `##` lines are mako comments and are not rendered.
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
|
||||
339
backend/alembic/verify_migration.py
Normal file
339
backend/alembic/verify_migration.py
Normal file
@@ -0,0 +1,339 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Migration Verification Script for Canvas Metadata Table
|
||||
|
||||
This script verifies that the canvas_metadata migration was successful by:
|
||||
1. Checking that the canvas_metadata table exists
|
||||
2. Verifying all required columns exist with correct types
|
||||
3. Checking that all indexes were created
|
||||
4. Validating data migration from general_canvases
|
||||
5. Validating data migration from asset canvases
|
||||
6. Validating data migration from storyboard canvases
|
||||
7. Checking data integrity and consistency
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from sqlalchemy import create_engine, inspect, text
|
||||
from sqlalchemy.engine import Engine
|
||||
import json
|
||||
|
||||
# Add backend directory to sys.path
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
|
||||
|
||||
from backend.src.config.settings import DB_PATH
|
||||
|
||||
|
||||
class MigrationVerifier:
    """Inspects a SQLite database and reports (via stdout) whether the
    canvas_metadata migration completed correctly.

    Each verify_* method prints a section header and per-item ✅/❌/⚠️
    lines; `check` and `warn` accumulate counters used by the final
    summary in `run_all_checks`.
    """

    def __init__(self, db_path: str):
        # Path to the SQLite file; existence is validated in run_all_checks.
        self.db_path = db_path
        self.engine = create_engine(f"sqlite:///{db_path}")
        self.inspector = inspect(self.engine)
        self.errors = []        # error messages from failed checks
        self.warnings = []      # non-fatal observations
        self.success_count = 0  # number of checks that passed
        self.total_checks = 0   # number of checks run through `check`

    def check(self, condition: bool, success_msg: str, error_msg: str):
        """Helper method to track check results.

        Prints ✅/❌ and records the error message when `condition` is
        false. NOTE: only calls routed through this method count toward
        the pass/fail totals.
        """
        self.total_checks += 1
        if condition:
            self.success_count += 1
            print(f"✅ {success_msg}")
        else:
            self.errors.append(error_msg)
            print(f"❌ {error_msg}")

    def warn(self, message: str):
        """Helper method to track warnings (does not affect pass/fail)."""
        self.warnings.append(message)
        print(f"⚠️ {message}")

    def verify_table_exists(self) -> bool:
        """Verify that canvas_metadata table exists."""
        print("\n=== Checking Table Existence ===")
        tables = self.inspector.get_table_names()
        self.check(
            'canvas_metadata' in tables,
            "canvas_metadata table exists",
            "canvas_metadata table does not exist"
        )
        return 'canvas_metadata' in tables

    def verify_columns(self) -> bool:
        """Verify all required columns exist with correct types.

        Type mismatches are reported as warnings (not errors); only a
        missing column fails this check.
        """
        print("\n=== Checking Columns ===")

        # Expected column name -> SQL type substring to match against.
        required_columns = {
            'id': 'VARCHAR',
            'project_id': 'VARCHAR',
            'canvas_type': 'VARCHAR',
            'related_entity_type': 'VARCHAR',
            'related_entity_id': 'VARCHAR',
            'name': 'VARCHAR',
            'description': 'VARCHAR',
            'order_index': 'INTEGER',
            'is_pinned': 'BOOLEAN',
            'tags': 'JSON',
            'node_count': 'INTEGER',
            'last_accessed_at': 'FLOAT',
            'access_count': 'INTEGER',
            'created_at': 'FLOAT',
            'updated_at': 'FLOAT',
            'deleted_at': 'FLOAT',
            'legacy_id': 'VARCHAR'
        }

        columns = self.inspector.get_columns('canvas_metadata')
        column_dict = {col['name']: col for col in columns}

        all_columns_exist = True
        for col_name, expected_type in required_columns.items():
            if col_name in column_dict:
                col_type = str(column_dict[col_name]['type']).upper()
                # SQLite stores JSON as TEXT, so we need to check for that
                if expected_type == 'JSON' and 'TEXT' in col_type:
                    print(f"✅ Column '{col_name}' exists with type {col_type} (JSON stored as TEXT)")
                # Substring match in either direction tolerates variants
                # like VARCHAR(255) vs VARCHAR.
                elif expected_type in col_type or col_type in expected_type:
                    print(f"✅ Column '{col_name}' exists with type {col_type}")
                else:
                    self.warn(f"Column '{col_name}' exists but type is {col_type}, expected {expected_type}")
            else:
                all_columns_exist = False
                self.errors.append(f"Column '{col_name}' is missing")
                print(f"❌ Column '{col_name}' is missing")

        return all_columns_exist

    def verify_indexes(self) -> bool:
        """Verify all required indexes exist."""
        print("\n=== Checking Indexes ===")

        # Must match the index names created by the add_canvas_metadata
        # migration.
        required_indexes = [
            'ix_canvas_metadata_project_id',
            'ix_canvas_metadata_canvas_type',
            'ix_canvas_metadata_related_entity_id',
            'ix_canvas_metadata_legacy_id',
            'ix_canvas_metadata_project_type',
            'ix_canvas_metadata_type_entity'
        ]

        indexes = self.inspector.get_indexes('canvas_metadata')
        index_names = [idx['name'] for idx in indexes]

        all_indexes_exist = True
        for idx_name in required_indexes:
            if idx_name in index_names:
                print(f"✅ Index '{idx_name}' exists")
            else:
                all_indexes_exist = False
                self.errors.append(f"Index '{idx_name}' is missing")
                print(f"❌ Index '{idx_name}' is missing")

        return all_indexes_exist

    def verify_foreign_keys(self) -> bool:
        """Verify foreign key constraints (canvas_metadata.project_id -> projects.id)."""
        print("\n=== Checking Foreign Keys ===")

        fks = self.inspector.get_foreign_keys('canvas_metadata')

        has_project_fk = any(
            fk['referred_table'] == 'projects' and 'project_id' in fk['constrained_columns']
            for fk in fks
        )

        self.check(
            has_project_fk,
            "Foreign key to projects table exists",
            "Foreign key to projects table is missing"
        )

        return has_project_fk

    def verify_data_migration(self) -> bool:
        """Verify data was migrated correctly.

        Informational only: prints per-type row counts and always
        returns True (an empty table is treated as a warning, not an
        error).
        """
        print("\n=== Checking Data Migration ===")

        with self.engine.connect() as conn:
            # Check if any canvas_metadata records exist
            result = conn.execute(text("SELECT COUNT(*) FROM canvas_metadata"))
            count = result.scalar()

            if count > 0:
                print(f"✅ Found {count} canvas metadata records")

                # Check general canvases
                result = conn.execute(text(
                    "SELECT COUNT(*) FROM canvas_metadata WHERE canvas_type = 'general'"
                ))
                general_count = result.scalar()
                print(f"   - General canvases: {general_count}")

                # Check asset canvases
                result = conn.execute(text(
                    "SELECT COUNT(*) FROM canvas_metadata WHERE canvas_type = 'asset'"
                ))
                asset_count = result.scalar()
                print(f"   - Asset canvases: {asset_count}")

                # Check storyboard canvases
                result = conn.execute(text(
                    "SELECT COUNT(*) FROM canvas_metadata WHERE canvas_type = 'storyboard'"
                ))
                storyboard_count = result.scalar()
                print(f"   - Storyboard canvases: {storyboard_count}")

                # Verify legacy_id mapping for migrated canvases
                result = conn.execute(text(
                    "SELECT COUNT(*) FROM canvas_metadata WHERE legacy_id IS NOT NULL"
                ))
                legacy_count = result.scalar()
                if legacy_count > 0:
                    print(f"✅ Found {legacy_count} canvases with legacy_id mapping")

                return True
            else:
                self.warn("No canvas metadata records found (this is OK if database is empty)")
                return True

    def verify_data_integrity(self) -> bool:
        """Verify data integrity constraints.

        Checks: every metadata row has canvas content (when the canvases
        table exists), asset/storyboard rows carry a related_entity_id,
        and general rows do not.
        """
        print("\n=== Checking Data Integrity ===")

        with self.engine.connect() as conn:
            # Check if canvases table exists
            tables = self.inspector.get_table_names()
            if 'canvases' not in tables:
                self.warn("canvases table does not exist yet - skipping canvas content check")
                # Treat as zero orphans so the check below still passes.
                orphaned_metadata = 0
            else:
                # Check that all canvas_metadata records have corresponding canvas content
                result = conn.execute(text("""
                    SELECT COUNT(*)
                    FROM canvas_metadata cm
                    LEFT JOIN canvases c ON cm.id = c.id
                    WHERE c.id IS NULL
                """))
                orphaned_metadata = result.scalar()

            self.check(
                orphaned_metadata == 0,
                f"All canvas metadata records have corresponding canvas content",
                f"Found {orphaned_metadata} canvas metadata records without canvas content"
            )

            # Check that related_entity_id is set for asset and storyboard canvases
            result = conn.execute(text("""
                SELECT COUNT(*)
                FROM canvas_metadata
                WHERE canvas_type IN ('asset', 'storyboard')
                AND related_entity_id IS NULL
            """))
            missing_entity_id = result.scalar()

            self.check(
                missing_entity_id == 0,
                "All asset/storyboard canvases have related_entity_id",
                f"Found {missing_entity_id} asset/storyboard canvases without related_entity_id"
            )

            # Check that general canvases don't have related_entity_id
            result = conn.execute(text("""
                SELECT COUNT(*)
                FROM canvas_metadata
                WHERE canvas_type = 'general'
                AND related_entity_id IS NOT NULL
            """))
            invalid_general = result.scalar()

            self.check(
                invalid_general == 0,
                "General canvases don't have related_entity_id",
                f"Found {invalid_general} general canvases with related_entity_id"
            )

            return orphaned_metadata == 0 and missing_entity_id == 0 and invalid_general == 0

    def verify_project_relationship(self) -> bool:
        """Verify that ProjectDB relationship is working (no dangling project_id)."""
        print("\n=== Checking Project Relationship ===")

        with self.engine.connect() as conn:
            # Check that all canvas_metadata records reference valid projects
            result = conn.execute(text("""
                SELECT COUNT(*)
                FROM canvas_metadata cm
                LEFT JOIN projects p ON cm.project_id = p.id
                WHERE p.id IS NULL
            """))
            orphaned_canvases = result.scalar()

            self.check(
                orphaned_canvases == 0,
                "All canvas metadata records reference valid projects",
                f"Found {orphaned_canvases} canvas metadata records with invalid project_id"
            )

            return orphaned_canvases == 0

    def run_all_checks(self) -> bool:
        """Run all verification checks and print a summary.

        Returns True when no check recorded an error (warnings alone do
        not fail the run).
        """
        print("=" * 60)
        print("Canvas Metadata Migration Verification")
        print("=" * 60)
        print(f"Database: {self.db_path}")

        # Bail out early if the database file is absent.
        if not os.path.exists(self.db_path):
            print(f"\n❌ Database file does not exist: {self.db_path}")
            return False

        # Run all checks
        table_exists = self.verify_table_exists()
        if not table_exists:
            print("\n❌ Cannot continue verification - table does not exist")
            return False

        self.verify_columns()
        self.verify_indexes()
        self.verify_foreign_keys()
        self.verify_data_migration()
        self.verify_data_integrity()
        self.verify_project_relationship()

        # Print summary
        print("\n" + "=" * 60)
        print("Verification Summary")
        print("=" * 60)
        print(f"Total checks: {self.total_checks}")
        print(f"Passed: {self.success_count}")
        print(f"Failed: {len(self.errors)}")
        print(f"Warnings: {len(self.warnings)}")

        if self.errors:
            print("\n❌ Errors:")
            for error in self.errors:
                print(f"   - {error}")

        if self.warnings:
            print("\n⚠️  Warnings:")
            for warning in self.warnings:
                print(f"   - {warning}")

        if len(self.errors) == 0:
            print("\n✅ All checks passed! Migration is successful.")
            return True
        else:
            print("\n❌ Migration verification failed. Please review the errors above.")
            return False
|
||||
|
||||
|
||||
def main():
    """Main entry point: run every check and exit 0 on success, 1 on failure."""
    passed = MigrationVerifier(DB_PATH).run_all_checks()
    sys.exit(0 if passed else 1)
|
||||
|
||||
|
||||
# Allow this verifier to be executed directly as a script.
if __name__ == "__main__":
    main()
|
||||
127
backend/alembic/versions/72f609dd9e66_initial_schema.py
Normal file
127
backend/alembic/versions/72f609dd9e66_initial_schema.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""Initial schema
|
||||
|
||||
Revision ID: 72f609dd9e66
|
||||
Revises:
|
||||
Create Date: 2026-01-08 09:52:59.473436
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = '72f609dd9e66'
# down_revision is None: this is the root of the migration history.
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema.

    Creates the initial five tables: projects, tasks, assets, episodes,
    storyboards. Parent tables are created before children so foreign
    keys resolve (assets/episodes -> projects, storyboards -> episodes
    and projects).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Top-level project container; chapters/style_params are JSON blobs.
    op.create_table('projects',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('status', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('created_at', sa.Float(), nullable=False),
    sa.Column('updated_at', sa.Float(), nullable=False),
    sa.Column('resolution', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('ratio', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('style_preset', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('style_params', sa.JSON(), nullable=True),
    sa.Column('chapters', sa.JSON(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Async generation tasks; standalone (no FK to projects).
    op.create_table('tasks',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('status', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('created_at', sa.Float(), nullable=False),
    sa.Column('updated_at', sa.Float(), nullable=False),
    sa.Column('model', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('params', sa.JSON(), nullable=True),
    sa.Column('provider_task_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('result', sa.JSON(), nullable=True),
    sa.Column('error', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Per-project assets (characters/scenes/props); media URLs in JSON.
    op.create_table('assets',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('desc', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('tags', sa.JSON(), nullable=True),
    sa.Column('image_url', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('image_urls', sa.JSON(), nullable=True),
    sa.Column('video_urls', sa.JSON(), nullable=True),
    sa.Column('extra_data', sa.JSON(), nullable=True),
    sa.Column('generations', sa.JSON(), nullable=True),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_assets_project_id'), 'assets', ['project_id'], unique=False)
    # Ordered episodes within a project.
    op.create_table('episodes',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('order_index', sa.Integer(), nullable=False),
    sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('desc', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('content', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('status', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_episodes_project_id'), 'episodes', ['project_id'], unique=False)
    # Per-episode shots with cinematic metadata and generated media.
    op.create_table('storyboards',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('episode_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('order_index', sa.Integer(), nullable=False),
    sa.Column('shot', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('desc', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('duration', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('scene_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('character_ids', sa.JSON(), nullable=True),
    sa.Column('prop_ids', sa.JSON(), nullable=True),
    sa.Column('voiceover', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('audio_desc', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('audio_url', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('camera_movement', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('transition', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('visual_anchor', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('visual_dynamics', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('director_note', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('image_prompt', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('video_script', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('image_urls', sa.JSON(), nullable=True),
    sa.Column('video_urls', sa.JSON(), nullable=True),
    sa.Column('generations', sa.JSON(), nullable=True),
    sa.ForeignKeyConstraint(['episode_id'], ['episodes.id'], ),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_storyboards_episode_id'), 'storyboards', ['episode_id'], unique=False)
    op.create_index(op.f('ix_storyboards_project_id'), 'storyboards', ['project_id'], unique=False)
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema.

    Drops everything created by upgrade(), children before parents so
    foreign-key references never dangle.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_storyboards_project_id'), table_name='storyboards')
    op.drop_index(op.f('ix_storyboards_episode_id'), table_name='storyboards')
    op.drop_table('storyboards')
    op.drop_index(op.f('ix_episodes_project_id'), table_name='episodes')
    op.drop_table('episodes')
    op.drop_index(op.f('ix_assets_project_id'), table_name='assets')
    op.drop_table('assets')
    op.drop_table('tasks')
    op.drop_table('projects')
    # ### end Alembic commands ###
|
||||
245
backend/alembic/versions/add_canvas_metadata_table.py
Normal file
245
backend/alembic/versions/add_canvas_metadata_table.py
Normal file
@@ -0,0 +1,245 @@
|
||||
"""add canvas metadata table

Revision ID: add_canvas_metadata
Revises: add_progress_tracking, add_prompt_fields
Create Date: 2026-01-17 10:00:00.000000

"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = 'add_canvas_metadata'
# Two-parent (merge) revision: joins the add_progress_tracking and
# add_prompt_fields branches into a single head.
down_revision: Union[str, Sequence[str], None] = ('add_progress_tracking', 'add_prompt_fields')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema.

    Creates the canvas_metadata table and its indexes, then migrates
    existing canvas records (general / asset / storyboard) into it.
    """
    # 1. Create the canvas_metadata table
    op.create_table(
        'canvas_metadata',
        sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('canvas_type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('related_entity_type', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('related_entity_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('order_index', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('is_pinned', sa.Boolean(), nullable=False, server_default='0'),
        sa.Column('tags', sa.JSON(), nullable=True),
        sa.Column('node_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('last_accessed_at', sa.Float(), nullable=True),
        sa.Column('access_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('created_at', sa.Float(), nullable=False),
        sa.Column('updated_at', sa.Float(), nullable=False),
        sa.Column('deleted_at', sa.Float(), nullable=True),
        sa.Column('legacy_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'])
    )

    # 2. Create indexes: single-column lookups plus two composite
    # indexes for the common (project, type) and (type, entity) queries.
    op.create_index('ix_canvas_metadata_project_id', 'canvas_metadata', ['project_id'])
    op.create_index('ix_canvas_metadata_canvas_type', 'canvas_metadata', ['canvas_type'])
    op.create_index('ix_canvas_metadata_related_entity_id', 'canvas_metadata', ['related_entity_id'])
    op.create_index('ix_canvas_metadata_legacy_id', 'canvas_metadata', ['legacy_id'])
    op.create_index('ix_canvas_metadata_project_type', 'canvas_metadata', ['project_id', 'canvas_type'])
    op.create_index('ix_canvas_metadata_type_entity', 'canvas_metadata', ['canvas_type', 'related_entity_id'])

    # 3. Migrate existing data from the legacy storage locations
    migrate_general_canvases()
    migrate_asset_canvases()
    migrate_storyboard_canvases()
|
||||
|
||||
|
||||
def migrate_general_canvases():
    """Migrate general canvas data.

    Copies each project's legacy `general_canvases` JSON list into rows
    of the new canvas_metadata table, preserving list order. Best-effort:
    projects with missing or unparsable JSON are skipped, and the whole
    step is a no-op if the legacy column does not exist.
    """
    conn = op.get_bind()

    # Fetch every project's legacy general_canvases blob.
    try:
        projects = conn.execute(sa.text("SELECT id, general_canvases FROM projects")).fetchall()
    except Exception:
        # Narrowed from a bare `except:`; the general_canvases column
        # does not exist on this database, so there is nothing to migrate.
        return

    for project in projects:
        project_id = project[0]
        general_canvases_json = project[1]

        if not general_canvases_json:
            continue

        try:
            canvases = json.loads(general_canvases_json) if isinstance(general_canvases_json, str) else general_canvases_json
        except (TypeError, ValueError):
            # Unparsable JSON — skip this project rather than abort the migration.
            continue

        if not isinstance(canvases, list):
            continue

        # Single timestamp per project so created/updated defaults agree.
        now = datetime.now().timestamp()
        for idx, canvas in enumerate(canvases):
            # Guard against malformed entries (non-dict items would
            # crash on .get below).
            if not isinstance(canvas, dict):
                continue
            canvas_id = canvas.get('id')
            if not canvas_id:
                continue

            # Insert one metadata row per legacy canvas; order_index
            # preserves the original list position.
            conn.execute(sa.text("""
                INSERT INTO canvas_metadata (
                    id, project_id, canvas_type, name, order_index,
                    created_at, updated_at
                ) VALUES (
                    :id, :project_id, 'general', :name, :order_index,
                    :created_at, :updated_at
                )
            """), {
                'id': canvas_id,
                'project_id': project_id,
                'name': canvas.get('name', f'Canvas {idx + 1}'),
                'order_index': idx,
                'created_at': canvas.get('createdAt', now),
                'updated_at': canvas.get('updatedAt', now)
            })

    conn.commit()
|
||||
|
||||
|
||||
def migrate_asset_canvases():
    """Migrate asset canvas data.

    Finds legacy canvases whose id starts with `canvas-asset-`, re-keys
    each to a fresh UUID, and records a canvas_metadata row linking it
    to its asset (the old id is preserved in legacy_id). No-op if the
    canvases table does not exist; orphaned canvases are skipped.
    """
    conn = op.get_bind()

    # Legacy asset canvases are identified by their id prefix.
    try:
        canvases = conn.execute(sa.text("""
            SELECT id, project_id, updated_at
            FROM canvases
            WHERE id LIKE 'canvas-asset-%'
        """)).fetchall()
    except Exception:
        # Narrowed from a bare `except:`; the canvases table does not
        # exist, so there is nothing to migrate.
        return

    for canvas in canvases:
        old_id = canvas[0]
        project_id = canvas[1]
        updated_at = canvas[2]

        # The legacy id embeds the asset id after the prefix.
        asset_id = old_id.replace('canvas-asset-', '')

        # Resolve the asset name; skip canvases whose asset is gone.
        try:
            asset = conn.execute(sa.text("""
                SELECT name FROM assets WHERE id = :asset_id
            """), {'asset_id': asset_id}).fetchone()
        except Exception:
            # Narrowed from a bare `except:` — lookup failed, skip row.
            continue

        if not asset:
            continue

        # Re-key the canvas under a fresh UUID.
        new_id = str(uuid.uuid4())

        # Insert the metadata row; legacy_id keeps the old key for
        # clients that still reference it.
        conn.execute(sa.text("""
            INSERT INTO canvas_metadata (
                id, project_id, canvas_type, related_entity_type,
                related_entity_id, name, created_at, updated_at, legacy_id
            ) VALUES (
                :id, :project_id, 'asset', 'asset',
                :asset_id, :name, :created_at, :updated_at, :legacy_id
            )
        """), {
            'id': new_id,
            'project_id': project_id,
            'asset_id': asset_id,
            'name': asset[0],
            'created_at': updated_at,
            'updated_at': updated_at,
            'legacy_id': old_id
        })

        # Point the canvas content row at the new UUID.
        conn.execute(sa.text("""
            UPDATE canvases SET id = :new_id WHERE id = :old_id
        """), {'new_id': new_id, 'old_id': old_id})

    conn.commit()
|
||||
|
||||
|
||||
def migrate_storyboard_canvases():
    """Migrate storyboard canvas data.

    Mirrors migrate_asset_canvases for ids prefixed `canvas-storyboard-`:
    re-keys each canvas to a fresh UUID and records a canvas_metadata
    row linking it to its storyboard (old id kept in legacy_id). No-op
    if the canvases table does not exist; orphans are skipped.
    """
    conn = op.get_bind()

    # Legacy storyboard canvases are identified by their id prefix.
    try:
        canvases = conn.execute(sa.text("""
            SELECT id, project_id, updated_at
            FROM canvases
            WHERE id LIKE 'canvas-storyboard-%'
        """)).fetchall()
    except Exception:
        # Narrowed from a bare `except:`; the canvases table does not
        # exist, so there is nothing to migrate.
        return

    for canvas in canvases:
        old_id = canvas[0]
        project_id = canvas[1]
        updated_at = canvas[2]

        # The legacy id embeds the storyboard id after the prefix.
        storyboard_id = old_id.replace('canvas-storyboard-', '')

        # Resolve the storyboard's shot label; skip orphaned canvases.
        try:
            storyboard = conn.execute(sa.text("""
                SELECT shot FROM storyboards WHERE id = :storyboard_id
            """), {'storyboard_id': storyboard_id}).fetchone()
        except Exception:
            # Narrowed from a bare `except:` — lookup failed, skip row.
            continue

        if not storyboard:
            continue

        new_id = str(uuid.uuid4())

        conn.execute(sa.text("""
            INSERT INTO canvas_metadata (
                id, project_id, canvas_type, related_entity_type,
                related_entity_id, name, created_at, updated_at, legacy_id
            ) VALUES (
                :id, :project_id, 'storyboard', 'storyboard',
                :storyboard_id, :name, :created_at, :updated_at, :legacy_id
            )
        """), {
            'id': new_id,
            'project_id': project_id,
            'storyboard_id': storyboard_id,
            'name': storyboard[0],
            'created_at': updated_at,
            'updated_at': updated_at,
            'legacy_id': old_id
        })

        # Point the canvas content row at the new UUID.
        conn.execute(sa.text("""
            UPDATE canvases SET id = :new_id WHERE id = :old_id
        """), {'new_id': new_id, 'old_id': old_id})

    conn.commit()
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: drop canvas_metadata indexes, then the table."""
    # Indexes must be removed before the table that owns them.
    index_names = (
        'ix_canvas_metadata_type_entity',
        'ix_canvas_metadata_project_type',
        'ix_canvas_metadata_legacy_id',
        'ix_canvas_metadata_related_entity_id',
        'ix_canvas_metadata_canvas_type',
        'ix_canvas_metadata_project_id',
    )
    for index_name in index_names:
        op.drop_index(index_name, 'canvas_metadata')
    op.drop_table('canvas_metadata')
|
||||
41
backend/alembic/versions/add_cinematic_fields.py
Normal file
41
backend/alembic/versions/add_cinematic_fields.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""add cinematic and professional fields to assets and storyboards
|
||||
|
||||
Revision ID: add_cinematic_fields
|
||||
Revises: add_prompt_fields
|
||||
Create Date: 2026-01-20
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'add_cinematic_fields'
|
||||
down_revision = 'add_canvas_metadata'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add cinematic control columns to the storyboards table."""
    # Asset-level attributes (emotion, environment_type, weather, ...) are
    # already persisted inside assets.extra_data (JSON), so no direct asset
    # columns are added here.  Should indexed queries ever be required:
    # op.add_column('assets', sa.Column('emotion', sa.String(), nullable=True))
    # op.add_column('assets', sa.Column('environment_type', sa.String(), nullable=True))
    # op.add_column('assets', sa.Column('weather', sa.String(), nullable=True))

    # Storyboard-level cinematic controls, all optional strings.
    for column_name in ('camera_angle', 'lens', 'focus', 'lighting', 'color_style'):
        op.add_column('storyboards', sa.Column(column_name, sa.String(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the cinematic control columns (reverse order of upgrade)."""
    for column_name in ('color_style', 'lighting', 'focus', 'lens', 'camera_angle'):
        op.drop_column('storyboards', column_name)
|
||||
100
backend/alembic/versions/add_indexes_and_optimizations.py
Normal file
100
backend/alembic/versions/add_indexes_and_optimizations.py
Normal file
@@ -0,0 +1,100 @@
|
||||
"""Add indexes and database optimizations
|
||||
|
||||
Revision ID: add_indexes_opt
|
||||
Revises: bfac9b8e32f5
|
||||
Create Date: 2026-01-14 10:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'add_indexes_opt'
|
||||
down_revision: Union[str, Sequence[str], None] = 'bfac9b8e32f5'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add indexes, soft delete columns, and full-text search support."""
    # Soft-delete marker (float timestamp) on every user-facing table.
    for table_name in ('projects', 'assets', 'episodes', 'storyboards', 'tasks'):
        op.add_column(table_name, sa.Column('deleted_at', sa.Float(), nullable=True))

    # (index name, table, columns) — created in this exact order.
    index_specs = (
        # projects: frequently queried fields
        ('idx_projects_created_at', 'projects', ['created_at']),
        ('idx_projects_updated_at', 'projects', ['updated_at']),
        ('idx_projects_status', 'projects', ['status']),
        ('idx_projects_deleted_at', 'projects', ['deleted_at']),
        # tasks
        ('idx_tasks_status', 'tasks', ['status']),
        ('idx_tasks_type', 'tasks', ['type']),
        ('idx_tasks_created_at', 'tasks', ['created_at']),
        ('idx_tasks_type_status', 'tasks', ['type', 'status']),
        ('idx_tasks_deleted_at', 'tasks', ['deleted_at']),
        # assets
        ('idx_assets_type', 'assets', ['type']),
        ('idx_assets_deleted_at', 'assets', ['deleted_at']),
        # episodes
        ('idx_episodes_status', 'episodes', ['status']),
        ('idx_episodes_order_index', 'episodes', ['order_index']),
        ('idx_episodes_deleted_at', 'episodes', ['deleted_at']),
        # storyboards
        ('idx_storyboards_type', 'storyboards', ['type']),
        ('idx_storyboards_order_index', 'storyboards', ['order_index']),
        ('idx_storyboards_deleted_at', 'storyboards', ['deleted_at']),
        # SQLite has no PostgreSQL-style full-text indexes; FTS5 virtual
        # tables or LIKE queries are used at the application layer instead.
        # Plain name indexes improve prefix LIKE query performance.
        ('idx_projects_name', 'projects', ['name']),
        ('idx_assets_name', 'assets', ['name']),
    )
    for index_name, table_name, columns in index_specs:
        op.create_index(index_name, table_name, columns)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove indexes, soft delete columns, and full-text search support."""
    # Indexes dropped in reverse order of creation.
    index_specs = (
        ('idx_assets_name', 'assets'),
        ('idx_projects_name', 'projects'),
        ('idx_storyboards_deleted_at', 'storyboards'),
        ('idx_storyboards_order_index', 'storyboards'),
        ('idx_storyboards_type', 'storyboards'),
        ('idx_episodes_deleted_at', 'episodes'),
        ('idx_episodes_order_index', 'episodes'),
        ('idx_episodes_status', 'episodes'),
        ('idx_assets_deleted_at', 'assets'),
        ('idx_assets_type', 'assets'),
        ('idx_tasks_deleted_at', 'tasks'),
        ('idx_tasks_type_status', 'tasks'),
        ('idx_tasks_created_at', 'tasks'),
        ('idx_tasks_type', 'tasks'),
        ('idx_tasks_status', 'tasks'),
        ('idx_projects_deleted_at', 'projects'),
        ('idx_projects_status', 'projects'),
        ('idx_projects_updated_at', 'projects'),
        ('idx_projects_created_at', 'projects'),
    )
    for index_name, table_name in index_specs:
        op.drop_index(index_name, table_name=table_name)

    # Drop the soft-delete columns added by upgrade().
    for table_name in ('tasks', 'storyboards', 'episodes', 'assets', 'projects'):
        op.drop_column(table_name, 'deleted_at')
|
||||
30
backend/alembic/versions/add_progress_tracking.py
Normal file
30
backend/alembic/versions/add_progress_tracking.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""add progress tracking fields
|
||||
|
||||
Revision ID: add_progress_tracking
|
||||
Revises: add_task_mgmt_fields
|
||||
Create Date: 2026-01-16 15:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import sqlite
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'add_progress_tracking'
|
||||
down_revision = 'add_task_mgmt_fields'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add JSON progress/error tracking columns to the projects table."""
    # Batch mode so the change also works on SQLite (table recreation).
    with op.batch_alter_table('projects', schema=None) as batch:
        for column_name in ('progress', 'error'):
            batch.add_column(sa.Column(column_name, sa.JSON(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove the progress/error tracking columns from projects."""
    with op.batch_alter_table('projects', schema=None) as batch:
        for column_name in ('error', 'progress'):
            batch.drop_column(column_name)
|
||||
36
backend/alembic/versions/add_prompt_fields.py
Normal file
36
backend/alembic/versions/add_prompt_fields.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""add prompt fields to assets and storyboards
|
||||
|
||||
Revision ID: add_prompt_fields
|
||||
Revises: add_task_mgmt_fields
|
||||
Create Date: 2026-01-16
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'add_prompt_fields'
|
||||
down_revision = 'add_task_mgmt_fields'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add AI prompt columns to the assets and storyboards tables."""
    # assets: the prompt used to generate the asset image.
    op.add_column('assets', sa.Column('image_prompt', sa.String(), nullable=True))

    # storyboards: source text plus image/video generation prompts.
    for column_name in ('original_text', 'merge_image_prompt', 'video_prompt'):
        op.add_column('storyboards', sa.Column(column_name, sa.String(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the prompt columns (reverse order of upgrade)."""
    for column_name in ('video_prompt', 'merge_image_prompt', 'original_text'):
        op.drop_column('storyboards', column_name)
    op.drop_column('assets', 'image_prompt')
|
||||
42
backend/alembic/versions/add_provider_to_tasks.py
Normal file
42
backend/alembic/versions/add_provider_to_tasks.py
Normal file
@@ -0,0 +1,42 @@
|
||||
"""add provider to tasks
|
||||
|
||||
Revision ID: add_provider_to_tasks
|
||||
Revises: bfac9b8e32f5
|
||||
Create Date: 2024-02-11
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'add_provider_to_tasks'
|
||||
down_revision = 'bfac9b8e32f5'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add provider column to tasks table"""
    # Add provider column (nullable, indexed)
    op.add_column('tasks', sa.Column('provider', sa.String(), nullable=True))

    # Create index on provider column for faster queries
    op.create_index(op.f('ix_tasks_provider'), 'tasks', ['provider'], unique=False)

    # Optional: Migrate existing data by extracting provider from params
    # This is a data migration that can be run separately if needed
    # NOTE(review): the ->> JSON operator is supported by PostgreSQL and by
    # SQLite only from version 3.38 — confirm the minimum SQLite version
    # deployed, or switch to json_extract() for older SQLite builds.
    op.execute("""
        UPDATE tasks
        SET provider = params->>'provider'
        WHERE params->>'provider' IS NOT NULL
    """)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove provider column from tasks table"""
    # The index must be removed before the column it covers.
    provider_index = op.f('ix_tasks_provider')
    op.drop_index(provider_index, table_name='tasks')
    op.drop_column('tasks', 'provider')
|
||||
50
backend/alembic/versions/add_task_management_fields.py
Normal file
50
backend/alembic/versions/add_task_management_fields.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""add task management fields
|
||||
|
||||
Revision ID: add_task_mgmt_fields
|
||||
Revises: add_indexes_opt
|
||||
Create Date: 2026-01-14
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'add_task_mgmt_fields'
|
||||
down_revision = 'add_indexes_opt'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Add retry, lifecycle-timestamp, and user-context fields to tasks."""
    # Retry configuration with server-side defaults for existing rows.
    op.add_column('tasks', sa.Column('retry_count', sa.Integer(), nullable=False, server_default='0'))
    op.add_column('tasks', sa.Column('max_retries', sa.Integer(), nullable=False, server_default='3'))

    # Task lifecycle timestamps (float epoch seconds).
    for column_name in ('started_at', 'completed_at'):
        op.add_column('tasks', sa.Column(column_name, sa.Float(), nullable=True))

    # User / project context for ownership queries.
    for column_name in ('user_id', 'project_id'):
        op.add_column('tasks', sa.Column(column_name, sa.String(), nullable=True))

    # Indexes for the new context fields.
    op.create_index('idx_tasks_user_id', 'tasks', ['user_id'])
    op.create_index('idx_tasks_project_id', 'tasks', ['project_id'])

    # Note: deleted_at column already exists from previous migration
|
||||
|
||||
|
||||
def downgrade():
    """Reverse upgrade(): drop the new indexes, then the columns."""
    for index_name in ('idx_tasks_project_id', 'idx_tasks_user_id'):
        op.drop_index(index_name, table_name='tasks')

    for column_name in ('project_id', 'user_id', 'completed_at',
                        'started_at', 'max_retries', 'retry_count'):
        op.drop_column('tasks', column_name)
|
||||
54
backend/alembic/versions/add_user_sessions.py
Normal file
54
backend/alembic/versions/add_user_sessions.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""add user_sessions table
|
||||
|
||||
Revision ID: add_user_sessions
|
||||
Revises: b546dbb9df98
|
||||
Create Date: 2026-03-09
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
revision: str = 'add_user_sessions'
|
||||
down_revision: Union[str, Sequence[str], None] = 'b546dbb9df98'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the user_sessions table and its lookup indexes."""
    op.create_table(
        'user_sessions',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('user_id', sa.String(), nullable=False),
        sa.Column('session_family_id', sa.String(), nullable=False),
        sa.Column('refresh_token_hash', sa.String(), nullable=False),
        sa.Column('status', sa.String(), nullable=False, server_default='active'),
        sa.Column('created_at', sa.Float(), nullable=False),
        sa.Column('updated_at', sa.Float(), nullable=False),
        sa.Column('expires_at', sa.Float(), nullable=False),
        sa.Column('last_used_at', sa.Float(), nullable=True),
        sa.Column('revoked_at', sa.Float(), nullable=True),
        sa.Column('revoked_reason', sa.String(), nullable=True),
        sa.Column('replaced_by_session_id', sa.String(), nullable=True),
        sa.Column('ip_address', sa.String(), nullable=True),
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('device_name', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    )
    # One non-unique lookup index per frequently-filtered column.
    indexed_columns = ('user_id', 'session_family_id', 'refresh_token_hash',
                       'status', 'revoked_at')
    for column_name in indexed_columns:
        op.create_index(op.f(f'ix_user_sessions_{column_name}'),
                        'user_sessions', [column_name], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the user_sessions indexes, then the table."""
    indexed_columns = ('revoked_at', 'status', 'refresh_token_hash',
                       'session_family_id', 'user_id')
    for column_name in indexed_columns:
        op.drop_index(op.f(f'ix_user_sessions_{column_name}'),
                      table_name='user_sessions')
    op.drop_table('user_sessions')
|
||||
@@ -0,0 +1,72 @@
|
||||
"""add_users_and_api_keys_tables
|
||||
|
||||
Revision ID: b546dbb9df98
|
||||
Revises: rename_style_preset
|
||||
Create Date: 2026-02-14 13:01:36.394119
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'b546dbb9df98'
|
||||
down_revision: Union[str, Sequence[str], None] = 'rename_style_preset'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema - Add users and user_api_keys tables."""
    # users: account identity, credentials, and authorization data.
    op.create_table(
        'users',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('email', sa.String(), nullable=True),
        sa.Column('password_hash', sa.String(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
        sa.Column('is_superuser', sa.Boolean(), nullable=False, server_default='0'),
        sa.Column('permissions', sa.JSON(), nullable=False, server_default='[]'),
        sa.Column('roles', sa.JSON(), nullable=False, server_default='[]'),
        sa.Column('created_at', sa.Float(), nullable=False),
        sa.Column('updated_at', sa.Float(), nullable=False),
        sa.Column('last_login', sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username')
    )
    for column_name in ('email', 'username'):
        op.create_index(op.f(f'ix_users_{column_name}'), 'users',
                        [column_name], unique=False)

    # user_api_keys: per-user encrypted provider credentials (FK -> users,
    # created second because of the foreign key).
    op.create_table(
        'user_api_keys',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('user_id', sa.String(), nullable=False),
        sa.Column('provider', sa.String(), nullable=False),
        sa.Column('encrypted_key', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
        sa.Column('created_at', sa.Float(), nullable=False),
        sa.Column('updated_at', sa.Float(), nullable=False),
        sa.Column('last_used_at', sa.Float(), nullable=True),
        sa.Column('usage_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('extra_config', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    for column_name in ('provider', 'user_id'):
        op.create_index(op.f(f'ix_user_api_keys_{column_name}'), 'user_api_keys',
                        [column_name], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema - Remove users and user_api_keys tables."""
    # Drop the dependent table (FK -> users) first.
    for column_name in ('user_id', 'provider'):
        op.drop_index(op.f(f'ix_user_api_keys_{column_name}'),
                      table_name='user_api_keys')
    op.drop_table('user_api_keys')

    for column_name in ('username', 'email'):
        op.drop_index(op.f(f'ix_users_{column_name}'), table_name='users')
    op.drop_table('users')
|
||||
@@ -0,0 +1,30 @@
|
||||
"""add location and time to storyboards
|
||||
|
||||
Revision ID: bfac9b8e32f5
|
||||
Revises: 72f609dd9e66
|
||||
Create Date: 2026-01-11 00:49:48.323949
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'bfac9b8e32f5'
|
||||
down_revision: Union[str, Sequence[str], None] = '72f609dd9e66'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema."""
    # Optional scene descriptors for storyboard shots.
    for column_name in ('location', 'time'):
        op.add_column('storyboards', sa.Column(column_name, sa.String(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema."""
    for column_name in ('time', 'location'):
        op.drop_column('storyboards', column_name)
|
||||
34
backend/alembic/versions/rename_style_preset_to_style_id.py
Normal file
34
backend/alembic/versions/rename_style_preset_to_style_id.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""rename style_preset to style_id
|
||||
|
||||
Revision ID: rename_style_preset
|
||||
Revises: add_cinematic_fields, add_provider_to_tasks
|
||||
Create Date: 2024-02-11
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from typing import Union, Sequence
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'rename_style_preset'
|
||||
down_revision: Union[str, Sequence[str], None] = ('add_cinematic_fields', 'add_provider_to_tasks')
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Rename style_preset column to style_id in projects table"""
    # SQLite cannot ALTER COLUMN ... RENAME directly; batch mode works
    # around that by recreating the table with the new column name.
    with op.batch_alter_table('projects', schema=None) as batch:
        batch.alter_column('style_preset', new_column_name='style_id')
|
||||
|
||||
|
||||
def downgrade():
    """Revert style_id column back to style_preset"""
    with op.batch_alter_table('projects', schema=None) as batch:
        batch.alter_column('style_id', new_column_name='style_preset')
|
||||
Reference in New Issue
Block a user