Initial commit: Pixel AI comic/video creation platform

- FastAPI backend with SQLModel, Alembic migrations, AgentScope agents
- Next.js 15 frontend with React 19, Tailwind, Zustand, React Flow
- Multi-provider AI system (DashScope, Kling, MiniMax, Volcengine, OpenAI, etc.)
- All HTTP clients migrated from sync requests to async httpx
- Admin-managed API keys via environment variables
- SSRF vulnerability fixed in ensure_url()
This commit is contained in:
张鹏
2026-04-29 01:20:12 +08:00
commit f9f4560459
808 changed files with 151724 additions and 0 deletions

View File

@@ -0,0 +1,127 @@
"""Initial schema
Revision ID: 72f609dd9e66
Revises:
Create Date: 2026-01-08 09:52:59.473436
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
# Root migration: down_revision is None, so this is the start of the chain.
revision: str = '72f609dd9e66'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Creates the five core tables: projects, tasks, assets, episodes and
    storyboards. Timestamps throughout are stored as floats (presumably
    Unix epoch seconds — confirm against the model layer).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Top-level container for a comic/video production.
    op.create_table('projects',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('status', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('created_at', sa.Float(), nullable=False),
    sa.Column('updated_at', sa.Float(), nullable=False),
    sa.Column('resolution', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('ratio', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('style_preset', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('style_params', sa.JSON(), nullable=True),
    sa.Column('chapters', sa.JSON(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Generation tasks; provider_task_id links to the external AI provider's job.
    op.create_table('tasks',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('status', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('created_at', sa.Float(), nullable=False),
    sa.Column('updated_at', sa.Float(), nullable=False),
    sa.Column('model', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('params', sa.JSON(), nullable=True),
    sa.Column('provider_task_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('result', sa.JSON(), nullable=True),
    sa.Column('error', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Reusable creative assets (characters, scenes, props) owned by a project.
    op.create_table('assets',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('desc', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('tags', sa.JSON(), nullable=True),
    sa.Column('image_url', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('image_urls', sa.JSON(), nullable=True),
    sa.Column('video_urls', sa.JSON(), nullable=True),
    sa.Column('extra_data', sa.JSON(), nullable=True),
    sa.Column('generations', sa.JSON(), nullable=True),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_assets_project_id'), 'assets', ['project_id'], unique=False)
    # Ordered episodes within a project (order_index drives the sequence).
    op.create_table('episodes',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('order_index', sa.Integer(), nullable=False),
    sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('desc', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('content', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('status', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_episodes_project_id'), 'episodes', ['project_id'], unique=False)
    # Per-shot storyboard rows; reference both their episode and project.
    op.create_table('storyboards',
    sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('episode_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('order_index', sa.Integer(), nullable=False),
    sa.Column('shot', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('desc', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('duration', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('scene_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('character_ids', sa.JSON(), nullable=True),
    sa.Column('prop_ids', sa.JSON(), nullable=True),
    sa.Column('voiceover', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('audio_desc', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('audio_url', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('camera_movement', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('transition', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('visual_anchor', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('visual_dynamics', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('director_note', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('image_prompt', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('video_script', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('image_urls', sa.JSON(), nullable=True),
    sa.Column('video_urls', sa.JSON(), nullable=True),
    sa.Column('generations', sa.JSON(), nullable=True),
    sa.ForeignKeyConstraint(['episode_id'], ['episodes.id'], ),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_storyboards_episode_id'), 'storyboards', ['episode_id'], unique=False)
    op.create_index(op.f('ix_storyboards_project_id'), 'storyboards', ['project_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Drops all five initial tables. Children go first (storyboards,
    episodes, assets) so foreign keys to projects never dangle; each
    table's indexes are removed before the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_storyboards_project_id'), table_name='storyboards')
    op.drop_index(op.f('ix_storyboards_episode_id'), table_name='storyboards')
    op.drop_table('storyboards')
    op.drop_index(op.f('ix_episodes_project_id'), table_name='episodes')
    op.drop_table('episodes')
    op.drop_index(op.f('ix_assets_project_id'), table_name='assets')
    op.drop_table('assets')
    op.drop_table('tasks')
    op.drop_table('projects')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,245 @@
"""add canvas metadata table
Revision ID: add_canvas_metadata
Revises: bfac9b8e32f5
Create Date: 2026-01-17 10:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
import json
import uuid
from datetime import datetime
# revision identifiers, used by Alembic.
revision: str = 'add_canvas_metadata'
# Merge point: joins the add_progress_tracking and add_prompt_fields branches.
down_revision: Union[str, Sequence[str], None] = ('add_progress_tracking', 'add_prompt_fields')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Creates the canvas_metadata table and its indexes, then backfills it
    from the legacy canvas storage (see the migrate_* helpers below).
    """
    # 1. Create the canvas_metadata table.
    op.create_table(
        'canvas_metadata',
        sa.Column('id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('canvas_type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('related_entity_type', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('related_entity_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('order_index', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('is_pinned', sa.Boolean(), nullable=False, server_default='0'),
        sa.Column('tags', sa.JSON(), nullable=True),
        sa.Column('node_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('last_accessed_at', sa.Float(), nullable=True),
        sa.Column('access_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('created_at', sa.Float(), nullable=False),
        sa.Column('updated_at', sa.Float(), nullable=False),
        sa.Column('deleted_at', sa.Float(), nullable=True),
        # Old canvas id (e.g. 'canvas-asset-<id>') kept for traceability.
        sa.Column('legacy_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'])
    )
    # 2. Create indexes: four single-column plus two composite lookups.
    op.create_index('ix_canvas_metadata_project_id', 'canvas_metadata', ['project_id'])
    op.create_index('ix_canvas_metadata_canvas_type', 'canvas_metadata', ['canvas_type'])
    op.create_index('ix_canvas_metadata_related_entity_id', 'canvas_metadata', ['related_entity_id'])
    op.create_index('ix_canvas_metadata_legacy_id', 'canvas_metadata', ['legacy_id'])
    op.create_index('ix_canvas_metadata_project_type', 'canvas_metadata', ['project_id', 'canvas_type'])
    op.create_index('ix_canvas_metadata_type_entity', 'canvas_metadata', ['canvas_type', 'related_entity_id'])
    # 3. Backfill metadata rows from the legacy canvas data.
    migrate_general_canvases()
    migrate_asset_canvases()
    migrate_storyboard_canvases()
def migrate_general_canvases():
    """Backfill canvas_metadata from each project's legacy general_canvases JSON.

    Best-effort data migration: unreadable or malformed legacy rows are
    skipped so the schema migration itself never fails on bad data.
    """
    conn = op.get_bind()
    # Fetch every project's general_canvases blob.
    try:
        projects = conn.execute(sa.text("SELECT id, general_canvases FROM projects")).fetchall()
    except sa.exc.SQLAlchemyError:
        # The general_canvases column does not exist on this database; nothing to migrate.
        return
    # One timestamp for the whole run instead of one per canvas entry.
    now = datetime.now().timestamp()
    for project_id, general_canvases_json in projects:
        if not general_canvases_json:
            continue
        try:
            canvases = (
                json.loads(general_canvases_json)
                if isinstance(general_canvases_json, str)
                else general_canvases_json
            )
        except (TypeError, ValueError):
            # Malformed JSON in a legacy row — skip it rather than abort.
            continue
        if not isinstance(canvases, list):
            continue
        for idx, canvas in enumerate(canvases):
            # Guard against non-dict entries in the legacy list.
            if not isinstance(canvas, dict):
                continue
            canvas_id = canvas.get('id')
            if not canvas_id:
                continue
            # Insert the metadata row; order_index preserves list position.
            conn.execute(sa.text("""
                INSERT INTO canvas_metadata (
                    id, project_id, canvas_type, name, order_index,
                    created_at, updated_at
                ) VALUES (
                    :id, :project_id, 'general', :name, :order_index,
                    :created_at, :updated_at
                )
            """), {
                'id': canvas_id,
                'project_id': project_id,
                'name': canvas.get('name', f'Canvas {idx + 1}'),
                'order_index': idx,
                'created_at': canvas.get('createdAt', now),
                'updated_at': canvas.get('updatedAt', now),
            })
    conn.commit()
def migrate_asset_canvases():
    """Backfill canvas_metadata rows for legacy asset canvases.

    Legacy canvases are keyed 'canvas-asset-<asset_id>'. Each one gets a
    fresh UUID primary key; the old id is preserved in legacy_id and the
    canvases row is re-keyed to the new id.
    """
    conn = op.get_bind()
    # Find every canvas using the legacy asset-canvas id scheme.
    try:
        canvases = conn.execute(sa.text("""
            SELECT id, project_id, updated_at
            FROM canvases
            WHERE id LIKE 'canvas-asset-%'
        """)).fetchall()
    except sa.exc.SQLAlchemyError:
        # The canvases table is absent on this database; nothing to migrate.
        return
    for old_id, project_id, updated_at in canvases:
        # The asset id is embedded in the legacy canvas id.
        asset_id = old_id.replace('canvas-asset-', '')
        # Look up the backing asset to reuse its name.
        try:
            asset = conn.execute(sa.text("""
                SELECT name FROM assets WHERE id = :asset_id
            """), {'asset_id': asset_id}).fetchone()
        except sa.exc.SQLAlchemyError:
            continue
        if not asset:
            # Orphaned canvas with no backing asset — leave it untouched.
            continue
        new_id = str(uuid.uuid4())
        conn.execute(sa.text("""
            INSERT INTO canvas_metadata (
                id, project_id, canvas_type, related_entity_type,
                related_entity_id, name, created_at, updated_at, legacy_id
            ) VALUES (
                :id, :project_id, 'asset', 'asset',
                :asset_id, :name, :created_at, :updated_at, :legacy_id
            )
        """), {
            'id': new_id,
            'project_id': project_id,
            'asset_id': asset_id,
            'name': asset[0],
            'created_at': updated_at,
            'updated_at': updated_at,
            'legacy_id': old_id,
        })
        # Re-key the canvases row to the new UUID.
        conn.execute(sa.text("""
            UPDATE canvases SET id = :new_id WHERE id = :old_id
        """), {'new_id': new_id, 'old_id': old_id})
    conn.commit()
def migrate_storyboard_canvases():
    """Backfill canvas_metadata rows for legacy storyboard canvases.

    Mirrors migrate_asset_canvases for ids keyed
    'canvas-storyboard-<storyboard_id>': assign a fresh UUID, record the
    old id in legacy_id, and re-key the canvases row.
    """
    conn = op.get_bind()
    # Find every canvas using the legacy storyboard-canvas id scheme.
    try:
        canvases = conn.execute(sa.text("""
            SELECT id, project_id, updated_at
            FROM canvases
            WHERE id LIKE 'canvas-storyboard-%'
        """)).fetchall()
    except sa.exc.SQLAlchemyError:
        # The canvases table is absent on this database; nothing to migrate.
        return
    for old_id, project_id, updated_at in canvases:
        storyboard_id = old_id.replace('canvas-storyboard-', '')
        # Look up the backing storyboard to reuse its shot label as the name.
        try:
            storyboard = conn.execute(sa.text("""
                SELECT shot FROM storyboards WHERE id = :storyboard_id
            """), {'storyboard_id': storyboard_id}).fetchone()
        except sa.exc.SQLAlchemyError:
            continue
        if not storyboard:
            # Orphaned canvas with no backing storyboard — leave it untouched.
            continue
        new_id = str(uuid.uuid4())
        conn.execute(sa.text("""
            INSERT INTO canvas_metadata (
                id, project_id, canvas_type, related_entity_type,
                related_entity_id, name, created_at, updated_at, legacy_id
            ) VALUES (
                :id, :project_id, 'storyboard', 'storyboard',
                :storyboard_id, :name, :created_at, :updated_at, :legacy_id
            )
        """), {
            'id': new_id,
            'project_id': project_id,
            'storyboard_id': storyboard_id,
            'name': storyboard[0],
            'created_at': updated_at,
            'updated_at': updated_at,
            'legacy_id': old_id,
        })
        # Re-key the canvases row to the new UUID.
        conn.execute(sa.text("""
            UPDATE canvases SET id = :new_id WHERE id = :old_id
        """), {'new_id': new_id, 'old_id': old_id})
    conn.commit()
def downgrade() -> None:
    """Downgrade schema.

    Before dropping canvas_metadata, restore the legacy canvas ids that
    upgrade() rewrote: without this, the legacy_id mapping is lost and the
    re-keyed canvases rows can never be matched to their old
    'canvas-asset-*' / 'canvas-storyboard-*' names again.
    """
    conn = op.get_bind()
    try:
        rows = conn.execute(sa.text(
            "SELECT id, legacy_id FROM canvas_metadata WHERE legacy_id IS NOT NULL"
        )).fetchall()
        for new_id, legacy_id in rows:
            conn.execute(sa.text(
                "UPDATE canvases SET id = :old_id WHERE id = :new_id"
            ), {'old_id': legacy_id, 'new_id': new_id})
        conn.commit()
    except sa.exc.SQLAlchemyError:
        # Best-effort restore (the canvases table may be absent);
        # the schema rollback below still proceeds.
        pass
    # Drop indexes before the table they belong to.
    op.drop_index('ix_canvas_metadata_type_entity', 'canvas_metadata')
    op.drop_index('ix_canvas_metadata_project_type', 'canvas_metadata')
    op.drop_index('ix_canvas_metadata_legacy_id', 'canvas_metadata')
    op.drop_index('ix_canvas_metadata_related_entity_id', 'canvas_metadata')
    op.drop_index('ix_canvas_metadata_canvas_type', 'canvas_metadata')
    op.drop_index('ix_canvas_metadata_project_id', 'canvas_metadata')
    op.drop_table('canvas_metadata')

View File

@@ -0,0 +1,41 @@
"""add cinematic and professional fields to assets and storyboards
Revision ID: add_cinematic_fields
Revises: add_prompt_fields
Create Date: 2026-01-20
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'add_cinematic_fields'
down_revision = 'add_canvas_metadata'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add cinematic control columns to the storyboards table."""
    # Asset-side cinematic attributes (emotion, environment_type, weather, ...)
    # already live in assets.extra_data as JSON, so no direct asset columns are
    # added here. If indexed lookups are ever needed, dedicated columns such as
    # assets.emotion / assets.environment_type / assets.weather can be added.
    #
    # Storyboard-side cinematic controls get real columns:
    for column_name in ('camera_angle', 'lens', 'focus', 'lighting', 'color_style'):
        op.add_column('storyboards', sa.Column(column_name, sa.String(), nullable=True))
def downgrade() -> None:
    """Drop the cinematic control columns from storyboards."""
    # Reverse of the creation order used in upgrade().
    for column_name in ('color_style', 'lighting', 'focus', 'lens', 'camera_angle'):
        op.drop_column('storyboards', column_name)

View File

@@ -0,0 +1,100 @@
"""Add indexes and database optimizations
Revision ID: add_indexes_opt
Revises: bfac9b8e32f5
Create Date: 2026-01-14 10:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# Branches off bfac9b8e32f5 (add_provider_to_tasks shares the same parent).
revision: str = 'add_indexes_opt'
down_revision: Union[str, Sequence[str], None] = 'bfac9b8e32f5'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add indexes, soft delete columns, and full-text search support."""
    # Soft-delete support: a nullable deletion timestamp on every core table.
    for table in ('projects', 'assets', 'episodes', 'storyboards', 'tasks'):
        op.add_column(table, sa.Column('deleted_at', sa.Float(), nullable=True))
    # Indexes for frequently queried fields, grouped per table. SQLite has no
    # native full-text index (unlike PostgreSQL); the final two name indexes
    # exist to speed up the LIKE queries the application uses instead (FTS5
    # virtual tables remain an application-layer option).
    index_specs = (
        ('idx_projects_created_at', 'projects', ['created_at']),
        ('idx_projects_updated_at', 'projects', ['updated_at']),
        ('idx_projects_status', 'projects', ['status']),
        ('idx_projects_deleted_at', 'projects', ['deleted_at']),
        ('idx_tasks_status', 'tasks', ['status']),
        ('idx_tasks_type', 'tasks', ['type']),
        ('idx_tasks_created_at', 'tasks', ['created_at']),
        ('idx_tasks_type_status', 'tasks', ['type', 'status']),
        ('idx_tasks_deleted_at', 'tasks', ['deleted_at']),
        ('idx_assets_type', 'assets', ['type']),
        ('idx_assets_deleted_at', 'assets', ['deleted_at']),
        ('idx_episodes_status', 'episodes', ['status']),
        ('idx_episodes_order_index', 'episodes', ['order_index']),
        ('idx_episodes_deleted_at', 'episodes', ['deleted_at']),
        ('idx_storyboards_type', 'storyboards', ['type']),
        ('idx_storyboards_order_index', 'storyboards', ['order_index']),
        ('idx_storyboards_deleted_at', 'storyboards', ['deleted_at']),
        ('idx_projects_name', 'projects', ['name']),
        ('idx_assets_name', 'assets', ['name']),
    )
    for index_name, table, columns in index_specs:
        op.create_index(index_name, table, columns)
def downgrade() -> None:
    """Remove indexes, soft delete columns, and full-text search support."""
    # Drop the indexes in exact reverse order of their creation in upgrade().
    index_specs = (
        ('idx_projects_created_at', 'projects'),
        ('idx_projects_updated_at', 'projects'),
        ('idx_projects_status', 'projects'),
        ('idx_projects_deleted_at', 'projects'),
        ('idx_tasks_status', 'tasks'),
        ('idx_tasks_type', 'tasks'),
        ('idx_tasks_created_at', 'tasks'),
        ('idx_tasks_type_status', 'tasks'),
        ('idx_tasks_deleted_at', 'tasks'),
        ('idx_assets_type', 'assets'),
        ('idx_assets_deleted_at', 'assets'),
        ('idx_episodes_status', 'episodes'),
        ('idx_episodes_order_index', 'episodes'),
        ('idx_episodes_deleted_at', 'episodes'),
        ('idx_storyboards_type', 'storyboards'),
        ('idx_storyboards_order_index', 'storyboards'),
        ('idx_storyboards_deleted_at', 'storyboards'),
        ('idx_projects_name', 'projects'),
        ('idx_assets_name', 'assets'),
    )
    for index_name, table in reversed(index_specs):
        op.drop_index(index_name, table_name=table)
    # Drop the soft-delete columns, reverse of the order they were added in.
    for table in ('tasks', 'storyboards', 'episodes', 'assets', 'projects'):
        op.drop_column(table, 'deleted_at')

View File

@@ -0,0 +1,30 @@
"""add progress tracking fields
Revision ID: add_progress_tracking
Revises: add_task_mgmt_fields
Create Date: 2026-01-16 15:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
# One of two branches off add_task_mgmt_fields (merged by add_canvas_metadata).
revision = 'add_progress_tracking'
down_revision = 'add_task_mgmt_fields'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add JSON progress/error tracking columns to the projects table."""
    # batch_alter_table keeps this SQLite-compatible (recreates the table).
    with op.batch_alter_table('projects', schema=None) as batch:
        for column_name in ('progress', 'error'):
            batch.add_column(sa.Column(column_name, sa.JSON(), nullable=True))
def downgrade() -> None:
    """Remove the progress/error tracking columns from projects."""
    with op.batch_alter_table('projects', schema=None) as batch:
        for column_name in ('error', 'progress'):
            batch.drop_column(column_name)

View File

@@ -0,0 +1,36 @@
"""add prompt fields to assets and storyboards
Revision ID: add_prompt_fields
Revises: add_task_mgmt_fields
Create Date: 2026-01-16
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# One of two branches off add_task_mgmt_fields (merged by add_canvas_metadata).
revision = 'add_prompt_fields'
down_revision = 'add_task_mgmt_fields'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add prompt-text columns to the assets and storyboards tables."""
    # assets: the prompt used to generate the asset image.
    op.add_column('assets', sa.Column('image_prompt', sa.String(), nullable=True))
    # storyboards: source text plus generation prompts.
    for column_name in ('original_text', 'merge_image_prompt', 'video_prompt'):
        op.add_column('storyboards', sa.Column(column_name, sa.String(), nullable=True))
def downgrade() -> None:
    """Drop the prompt-text columns added by this revision."""
    # storyboards first, reverse of the creation order.
    for column_name in ('video_prompt', 'merge_image_prompt', 'original_text'):
        op.drop_column('storyboards', column_name)
    op.drop_column('assets', 'image_prompt')

View File

@@ -0,0 +1,42 @@
"""add provider to tasks
Revision ID: add_provider_to_tasks
Revises: bfac9b8e32f5
Create Date: 2024-02-11
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# Branches off bfac9b8e32f5 (add_indexes_opt shares the same parent).
revision = 'add_provider_to_tasks'
down_revision = 'bfac9b8e32f5'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add provider column to tasks table"""
    # Add provider column (nullable, indexed)
    op.add_column('tasks', sa.Column('provider', sa.String(), nullable=True))
    # Create index on provider column for faster queries
    op.create_index(op.f('ix_tasks_provider'), 'tasks', ['provider'], unique=False)
    # Optional: Migrate existing data by extracting provider from params
    # This is a data migration that can be run separately if needed
    # NOTE(review): the JSON ->> operator is dialect-specific — it needs
    # PostgreSQL or SQLite >= 3.38. Confirm the minimum supported database
    # version before relying on this backfill.
    op.execute("""
        UPDATE tasks
        SET provider = params->>'provider'
        WHERE params->>'provider' IS NOT NULL
    """)
def downgrade() -> None:
    """Drop the provider column (and its index) from the tasks table."""
    # The index must go before the column it covers.
    index_name = op.f('ix_tasks_provider')
    op.drop_index(index_name, table_name='tasks')
    op.drop_column('tasks', 'provider')

View File

@@ -0,0 +1,50 @@
"""add task management fields
Revision ID: add_task_mgmt_fields
Revises: add_indexes_opt
Create Date: 2026-01-14
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# Follows add_indexes_opt, which already added tasks.deleted_at.
revision = 'add_task_mgmt_fields'
down_revision = 'add_indexes_opt'
branch_labels = None
depends_on = None
def upgrade():
    """Add retry, lifecycle-timestamp, and user-context fields to tasks."""
    # Retry bookkeeping; server defaults backfill existing rows.
    op.add_column('tasks', sa.Column('retry_count', sa.Integer(), nullable=False, server_default='0'))
    op.add_column('tasks', sa.Column('max_retries', sa.Integer(), nullable=False, server_default='3'))
    # Lifecycle timestamps (floats, matching the other *_at columns).
    for column_name in ('started_at', 'completed_at'):
        op.add_column('tasks', sa.Column(column_name, sa.Float(), nullable=True))
    # Ownership / scoping context, each with a lookup index.
    for column_name in ('user_id', 'project_id'):
        op.add_column('tasks', sa.Column(column_name, sa.String(), nullable=True))
        op.create_index(f'idx_tasks_{column_name}', 'tasks', [column_name])
    # Note: deleted_at column already exists from previous migration
def downgrade():
    """Remove the task-management fields added by this revision."""
    # Indexes first, then columns, each in reverse creation order.
    for index_name in ('idx_tasks_project_id', 'idx_tasks_user_id'):
        op.drop_index(index_name, table_name='tasks')
    for column_name in ('project_id', 'user_id', 'completed_at',
                        'started_at', 'max_retries', 'retry_count'):
        op.drop_column('tasks', column_name)

View File

@@ -0,0 +1,54 @@
"""add user_sessions table
Revision ID: add_user_sessions
Revises: b546dbb9df98
Create Date: 2026-03-09
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# Follows b546dbb9df98, which created the users table this one references.
revision: str = 'add_user_sessions'
down_revision: Union[str, Sequence[str], None] = 'b546dbb9df98'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the user_sessions table (refresh-token sessions) and its indexes.

    Only a hash of the refresh token is stored, never the token itself.
    session_family_id groups the chain of sessions produced by token
    rotation (replaced_by_session_id links consecutive links in the chain).
    """
    op.create_table(
        'user_sessions',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('user_id', sa.String(), nullable=False),
        sa.Column('session_family_id', sa.String(), nullable=False),
        sa.Column('refresh_token_hash', sa.String(), nullable=False),
        sa.Column('status', sa.String(), nullable=False, server_default='active'),
        sa.Column('created_at', sa.Float(), nullable=False),
        sa.Column('updated_at', sa.Float(), nullable=False),
        sa.Column('expires_at', sa.Float(), nullable=False),
        sa.Column('last_used_at', sa.Float(), nullable=True),
        sa.Column('revoked_at', sa.Float(), nullable=True),
        sa.Column('revoked_reason', sa.String(), nullable=True),
        sa.Column('replaced_by_session_id', sa.String(), nullable=True),
        # Device fingerprint captured at login.
        sa.Column('ip_address', sa.String(), nullable=True),
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('device_name', sa.String(), nullable=True),
        # Sessions are deleted together with their owning user.
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_user_sessions_user_id'), 'user_sessions', ['user_id'], unique=False)
    op.create_index(op.f('ix_user_sessions_session_family_id'), 'user_sessions', ['session_family_id'], unique=False)
    op.create_index(op.f('ix_user_sessions_refresh_token_hash'), 'user_sessions', ['refresh_token_hash'], unique=False)
    op.create_index(op.f('ix_user_sessions_status'), 'user_sessions', ['status'], unique=False)
    op.create_index(op.f('ix_user_sessions_revoked_at'), 'user_sessions', ['revoked_at'], unique=False)
def downgrade() -> None:
    """Drop the user_sessions table and all of its indexes."""
    # Reverse creation order; index names follow the ix_<table>_<column> pattern.
    for column in ('revoked_at', 'status', 'refresh_token_hash',
                   'session_family_id', 'user_id'):
        op.drop_index(op.f(f'ix_user_sessions_{column}'), table_name='user_sessions')
    op.drop_table('user_sessions')

View File

@@ -0,0 +1,72 @@
"""add_users_and_api_keys_tables
Revision ID: b546dbb9df98
Revises: rename_style_preset
Create Date: 2026-02-14 13:01:36.394119
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# Follows rename_style_preset, the merge of the two earlier branches.
revision: str = 'b546dbb9df98'
down_revision: Union[str, Sequence[str], None] = 'rename_style_preset'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema - Add users and user_api_keys tables."""
    # Create users table. username and email carry UNIQUE constraints;
    # the non-unique indexes below additionally speed up lookups.
    op.create_table(
        'users',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('email', sa.String(), nullable=True),
        # Only the hash is stored, never the plaintext password.
        sa.Column('password_hash', sa.String(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
        sa.Column('is_superuser', sa.Boolean(), nullable=False, server_default='0'),
        # JSON arrays; default to empty lists for new rows.
        sa.Column('permissions', sa.JSON(), nullable=False, server_default='[]'),
        sa.Column('roles', sa.JSON(), nullable=False, server_default='[]'),
        sa.Column('created_at', sa.Float(), nullable=False),
        sa.Column('updated_at', sa.Float(), nullable=False),
        sa.Column('last_login', sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username')
    )
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=False)
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=False)
    # Create user_api_keys table (per-user provider credentials; the key
    # itself is stored encrypted). Rows cascade away with their user.
    op.create_table(
        'user_api_keys',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('user_id', sa.String(), nullable=False),
        sa.Column('provider', sa.String(), nullable=False),
        sa.Column('encrypted_key', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
        sa.Column('created_at', sa.Float(), nullable=False),
        sa.Column('updated_at', sa.Float(), nullable=False),
        sa.Column('last_used_at', sa.Float(), nullable=True),
        sa.Column('usage_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('extra_config', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_api_keys_provider'), 'user_api_keys', ['provider'], unique=False)
    op.create_index(op.f('ix_user_api_keys_user_id'), 'user_api_keys', ['user_id'], unique=False)
def downgrade() -> None:
    """Downgrade schema - Remove users and user_api_keys tables."""
    # Drop the dependent table first (user_api_keys has an FK to users).
    for index_name in ('ix_user_api_keys_user_id', 'ix_user_api_keys_provider'):
        op.drop_index(op.f(index_name), table_name='user_api_keys')
    op.drop_table('user_api_keys')
    for index_name in ('ix_users_username', 'ix_users_email'):
        op.drop_index(op.f(index_name), table_name='users')
    op.drop_table('users')

View File

@@ -0,0 +1,30 @@
"""add location and time to storyboards
Revision ID: bfac9b8e32f5
Revises: 72f609dd9e66
Create Date: 2026-01-11 00:49:48.323949
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# Second revision in the chain, directly after the initial schema.
revision: str = 'bfac9b8e32f5'
down_revision: Union[str, Sequence[str], None] = '72f609dd9e66'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add scene-context columns (where / when) to the storyboards table."""
    for column_name in ('location', 'time'):
        op.add_column('storyboards', sa.Column(column_name, sa.String(), nullable=True))
def downgrade() -> None:
    """Drop the scene-context columns from storyboards."""
    for column_name in ('time', 'location'):
        op.drop_column('storyboards', column_name)

View File

@@ -0,0 +1,34 @@
"""rename style_preset to style_id
Revision ID: rename_style_preset
Revises: add_cinematic_fields, add_provider_to_tasks
Create Date: 2024-02-11
"""
from alembic import op
import sqlalchemy as sa
from typing import Union, Sequence
# revision identifiers, used by Alembic.
# Merge point: joins the add_cinematic_fields and add_provider_to_tasks branches.
revision = 'rename_style_preset'
down_revision: Union[str, Sequence[str], None] = ('add_cinematic_fields', 'add_provider_to_tasks')
branch_labels = None
depends_on = None
def upgrade():
"""Rename style_preset column to style_id in projects table"""
# SQLite doesn't support ALTER COLUMN RENAME directly
# We need to use a workaround with table recreation
with op.batch_alter_table('projects', schema=None) as batch_op:
# Rename the column
batch_op.alter_column('style_preset', new_column_name='style_id')
def downgrade():
"""Revert style_id column back to style_preset"""
with op.batch_alter_table('projects', schema=None) as batch_op:
# Rename back
batch_op.alter_column('style_id', new_column_name='style_preset')