diff --git a/mcpgateway/alembic/versions/34492f99a0c4_add_comprehensive_metadata_to_all_.py b/mcpgateway/alembic/versions/34492f99a0c4_add_comprehensive_metadata_to_all_.py
index 0b890200..135a0632 100644
--- a/mcpgateway/alembic/versions/34492f99a0c4_add_comprehensive_metadata_to_all_.py
+++ b/mcpgateway/alembic/versions/34492f99a0c4_add_comprehensive_metadata_to_all_.py
@@ -27,61 +27,130 @@ def upgrade() -> None:
     """Add comprehensive metadata columns to all entity tables for audit tracking."""
+    bind = op.get_bind()
+    inspector = sa.inspect(bind)
+
+    # Check if this is a fresh database without existing tables
+    if not inspector.has_table("gateways"):
+        print("Fresh database detected. Skipping metadata migration.")
+        return
+
     tables = ["tools", "resources", "prompts", "servers", "gateways"]
+    # Define metadata columns to add
+    metadata_columns = [
+        ("created_by", sa.String(), True),
+        ("created_from_ip", sa.String(), True),
+        ("created_via", sa.String(), True),
+        ("created_user_agent", sa.Text(), True),
+        ("modified_by", sa.String(), True),
+        ("modified_from_ip", sa.String(), True),
+        ("modified_via", sa.String(), True),
+        ("modified_user_agent", sa.Text(), True),
+        ("import_batch_id", sa.String(), True),
+        ("federation_source", sa.String(), True),
+        ("version", sa.Integer(), False, "1"),  # Not nullable, with default
+    ]
+
+    # Add columns to each table if they don't exist
+    for table in tables:
+        if inspector.has_table(table):
+            columns = [col["name"] for col in inspector.get_columns(table)]
+
+            for col_name, col_type, nullable, *default in metadata_columns:
+                if col_name not in columns:
+                    try:
+                        if default:
+                            op.add_column(table, sa.Column(col_name, col_type, nullable=nullable, server_default=default[0]))
+                        else:
+                            op.add_column(table, sa.Column(col_name, col_type, nullable=nullable))
+                        print(f"Added column {col_name} to {table}")
+                    except Exception as e:
+                        print(f"Warning: Could not add column {col_name} to {table}: {e}")
+
+    # Create indexes for query performance (safe B-tree indexes)
+    # Note: modified_at column doesn't exist in schema, so we skip it
+    index_definitions = [
+        ("created_by", ["created_by"]),
+        ("created_at", ["created_at"]),
+        ("created_via", ["created_via"]),
+    ]
+
     for table in tables:
-        # Creation metadata (nullable=True for backwards compatibility)
-        op.add_column(table, sa.Column("created_by", sa.String(), nullable=True))
-        op.add_column(table, sa.Column("created_from_ip", sa.String(), nullable=True))
-        op.add_column(table, sa.Column("created_via", sa.String(), nullable=True))
-        op.add_column(table, sa.Column("created_user_agent", sa.Text(), nullable=True))
-
-        # Modification metadata (nullable=True for backwards compatibility)
-        op.add_column(table, sa.Column("modified_by", sa.String(), nullable=True))
-        op.add_column(table, sa.Column("modified_from_ip", sa.String(), nullable=True))
-        op.add_column(table, sa.Column("modified_via", sa.String(), nullable=True))
-        op.add_column(table, sa.Column("modified_user_agent", sa.Text(), nullable=True))
-
-        # Source tracking (nullable=True for backwards compatibility)
-        op.add_column(table, sa.Column("import_batch_id", sa.String(), nullable=True))
-        op.add_column(table, sa.Column("federation_source", sa.String(), nullable=True))
-        op.add_column(table, sa.Column("version", sa.Integer(), nullable=False, server_default="1"))
-
-        # Create indexes for query performance (PostgreSQL compatible, SQLite ignores)
-        try:
-            op.create_index(f"idx_{table}_created_by", table, ["created_by"])
-            op.create_index(f"idx_{table}_created_at", table, ["created_at"])
-            op.create_index(f"idx_{table}_modified_at", table, ["modified_at"])
-            op.create_index(f"idx_{table}_created_via", table, ["created_via"])
-        except Exception:  # nosec B110 - database compatibility
-            # SQLite doesn't support all index types, skip silently
-            pass
+        if inspector.has_table(table):
+            try:
+                existing_indexes = [idx["name"] for idx in inspector.get_indexes(table)]
+            except Exception as e:
+                print(f"Warning: Could not get indexes for {table}: {e}")
+                continue
+
+            for index_suffix, columns in index_definitions:
+                index_name = f"idx_{table}_{index_suffix}"
+                if index_name not in existing_indexes:
+                    # Check if the column exists before creating index
+                    table_columns = [col["name"] for col in inspector.get_columns(table)]
+                    if all(col in table_columns for col in columns):
+                        try:
+                            op.create_index(index_name, table, columns)
+                            print(f"Created index {index_name}")
+                        except Exception as e:
+                            print(f"Warning: Could not create index {index_name}: {e}")
+                    else:
+                        print(f"Skipping index {index_name} - required columns {columns} not found in {table}")


 def downgrade() -> None:
     """Remove comprehensive metadata columns from all entity tables."""
+    bind = op.get_bind()
+    inspector = sa.inspect(bind)
+
     tables = ["tools", "resources", "prompts", "servers", "gateways"]
+    # Index names to drop (modified_at doesn't exist, so skip it)
+    index_suffixes = ["created_by", "created_at", "created_via"]
+
+    # Drop indexes first (if they exist)
     for table in tables:
-        # Drop indexes first (if they exist)
-        try:
-            op.drop_index(f"idx_{table}_created_by", table)
-            op.drop_index(f"idx_{table}_created_at", table)
-            op.drop_index(f"idx_{table}_modified_at", table)
-            op.drop_index(f"idx_{table}_created_via", table)
-        except Exception:  # nosec B110 - database compatibility
-            # Indexes might not exist on SQLite
-            pass
-
-        # Drop metadata columns
-        op.drop_column(table, "version")
-        op.drop_column(table, "federation_source")
-        op.drop_column(table, "import_batch_id")
-        op.drop_column(table, "modified_user_agent")
-        op.drop_column(table, "modified_via")
-        op.drop_column(table, "modified_from_ip")
-        op.drop_column(table, "modified_by")
-        op.drop_column(table, "created_user_agent")
-        op.drop_column(table, "created_via")
-        op.drop_column(table, "created_from_ip")
-        op.drop_column(table, "created_by")
+        if inspector.has_table(table):
+            try:
+                existing_indexes = [idx["name"] for idx in inspector.get_indexes(table)]
+            except Exception as e:
+                print(f"Warning: Could not get indexes for {table}: {e}")
+                continue
+
+            for suffix in index_suffixes:
+                index_name = f"idx_{table}_{suffix}"
+                if index_name in existing_indexes:
+                    try:
+                        op.drop_index(index_name, table)
+                        print(f"Dropped index {index_name}")
+                    except Exception as e:
+                        print(f"Warning: Could not drop index {index_name}: {e}")
+
+    # Metadata columns to drop (in reverse order for safety)
+    metadata_columns = [
+        "version",
+        "federation_source",
+        "import_batch_id",
+        "modified_user_agent",
+        "modified_via",
+        "modified_from_ip",
+        "modified_by",
+        "created_user_agent",
+        "created_via",
+        "created_from_ip",
+        "created_by",
+    ]
+
+    # Drop metadata columns (if they exist)
+    for table in reversed(tables):  # Reverse order for safety
+        if inspector.has_table(table):
+            columns = [col["name"] for col in inspector.get_columns(table)]
+
+            for col_name in metadata_columns:
+                if col_name in columns:
+                    try:
+                        op.drop_column(table, col_name)
+                        print(f"Dropped column {col_name} from {table}")
+                    except Exception as e:
+                        print(f"Warning: Could not drop column {col_name} from {table}: {e}")
{col_name} from {table}: {e}") diff --git a/mcpgateway/alembic/versions/3b17fdc40a8d_add_passthrough_headers_to_gateways_and_.py b/mcpgateway/alembic/versions/3b17fdc40a8d_add_passthrough_headers_to_gateways_and_.py index 89267cc6..b294484d 100644 --- a/mcpgateway/alembic/versions/3b17fdc40a8d_add_passthrough_headers_to_gateways_and_.py +++ b/mcpgateway/alembic/versions/3b17fdc40a8d_add_passthrough_headers_to_gateways_and_.py @@ -27,17 +27,36 @@ def upgrade() -> None: """Upgrade schema.""" - # Create global_config table - op.create_table("global_config", sa.Column("id", sa.Integer(), nullable=False), sa.Column("passthrough_headers", sa.JSON(), nullable=True), sa.PrimaryKeyConstraint("id")) + bind = op.get_bind() + inspector = sa.inspect(bind) - # Add passthrough_headers column to gateways table - op.add_column("gateways", sa.Column("passthrough_headers", sa.JSON(), nullable=True)) + # Check if this is a fresh database without existing tables + if not inspector.has_table("gateways"): + print("Fresh database detected. Skipping passthrough headers migration.") + return + + # Create global_config table if it doesn't exist + if not inspector.has_table("global_config"): + op.create_table("global_config", sa.Column("id", sa.Integer(), nullable=False), sa.Column("passthrough_headers", sa.JSON(), nullable=True), sa.PrimaryKeyConstraint("id")) + + # Add passthrough_headers column to gateways table if it doesn't exist + if inspector.has_table("gateways"): + columns = [col["name"] for col in inspector.get_columns("gateways")] + if "passthrough_headers" not in columns: + op.add_column("gateways", sa.Column("passthrough_headers", sa.JSON(), nullable=True)) def downgrade() -> None: """Downgrade schema.""" - # Remove passthrough_headers column from gateways table - op.drop_column("gateways", "passthrough_headers") - - # Drop global_config table - op.drop_table("global_config") + bind = op.get_bind() + inspector = sa.inspect(bind) + + # Remove passthrough_headers column from gateways table if it exists + if inspector.has_table("gateways"): + columns = [col["name"] for col in inspector.get_columns("gateways")] + if "passthrough_headers" in columns: + op.drop_column("gateways", "passthrough_headers") + + # Drop global_config table if it exists + if inspector.has_table("global_config"): + op.drop_table("global_config") diff --git a/mcpgateway/alembic/versions/add_a2a_agents_and_metrics.py b/mcpgateway/alembic/versions/add_a2a_agents_and_metrics.py index 91f87ec4..9e98ebbd 100644 --- a/mcpgateway/alembic/versions/add_a2a_agents_and_metrics.py +++ b/mcpgateway/alembic/versions/add_a2a_agents_and_metrics.py @@ -34,7 +34,7 @@ def upgrade() -> None: existing_tables = inspector.get_table_names() if "a2a_agents" not in existing_tables: - # Create a2a_agents table + # Create a2a_agents table with unique constraints included (SQLite compatible) op.create_table( "a2a_agents", sa.Column("id", sa.String(36), primary_key=True), @@ -65,12 +65,10 @@ def upgrade() -> None: sa.Column("import_batch_id", sa.String()), sa.Column("federation_source", sa.String()), sa.Column("version", sa.Integer(), nullable=False, server_default="1"), + sa.UniqueConstraint("name", name="uq_a2a_agents_name"), + sa.UniqueConstraint("slug", name="uq_a2a_agents_slug"), ) - # Create unique constraints - op.create_unique_constraint("uq_a2a_agents_name", "a2a_agents", ["name"]) - op.create_unique_constraint("uq_a2a_agents_slug", "a2a_agents", ["slug"]) - if "a2a_agent_metrics" not in existing_tables: # Create a2a_agent_metrics table op.create_table( @@ 
-93,57 +91,90 @@ def upgrade() -> None: ) # Create indexes for performance (check if they exist first) - existing_indexes = [] - try: - existing_indexes = [idx["name"] for idx in inspector.get_indexes("a2a_agents")] - except Exception: - pass - - if "idx_a2a_agents_enabled" not in existing_indexes: + # Only create indexes if tables were actually created + if "a2a_agents" in existing_tables: try: - op.create_index("idx_a2a_agents_enabled", "a2a_agents", ["enabled"]) + existing_indexes = [idx["name"] for idx in inspector.get_indexes("a2a_agents")] except Exception: - pass + existing_indexes = [] + + if "idx_a2a_agents_enabled" not in existing_indexes: + try: + op.create_index("idx_a2a_agents_enabled", "a2a_agents", ["enabled"]) + except Exception as e: + print(f"Warning: Could not create index idx_a2a_agents_enabled: {e}") + + if "idx_a2a_agents_agent_type" not in existing_indexes: + try: + op.create_index("idx_a2a_agents_agent_type", "a2a_agents", ["agent_type"]) + except Exception as e: + print(f"Warning: Could not create index idx_a2a_agents_agent_type: {e}") - if "idx_a2a_agents_agent_type" not in existing_indexes: + # Create B-tree index for tags (safer than GIN, works on both PostgreSQL and SQLite) + if "idx_a2a_agents_tags" not in existing_indexes: + try: + op.create_index("idx_a2a_agents_tags", "a2a_agents", ["tags"]) + except Exception as e: + print(f"Warning: Could not create index idx_a2a_agents_tags: {e}") + + # Metrics table indexes + if "a2a_agent_metrics" in existing_tables: try: - op.create_index("idx_a2a_agents_agent_type", "a2a_agents", ["agent_type"]) + existing_metrics_indexes = [idx["name"] for idx in inspector.get_indexes("a2a_agent_metrics")] except Exception: - pass + existing_metrics_indexes = [] - # Metrics table indexes - try: - existing_indexes = [idx["name"] for idx in inspector.get_indexes("a2a_agent_metrics")] - if "idx_a2a_agent_metrics_agent_id" not in existing_indexes: - op.create_index("idx_a2a_agent_metrics_agent_id", "a2a_agent_metrics", ["a2a_agent_id"]) - if "idx_a2a_agent_metrics_timestamp" not in existing_indexes: - op.create_index("idx_a2a_agent_metrics_timestamp", "a2a_agent_metrics", ["timestamp"]) - except Exception: - pass - - # Create GIN indexes for tags on PostgreSQL (ignored on SQLite) - try: - if "idx_a2a_agents_tags" not in existing_indexes: - op.create_index("idx_a2a_agents_tags", "a2a_agents", ["tags"], postgresql_using="gin") - except Exception: # nosec B110 - database compatibility - pass # SQLite doesn't support GIN indexes + if "idx_a2a_agent_metrics_agent_id" not in existing_metrics_indexes: + try: + op.create_index("idx_a2a_agent_metrics_agent_id", "a2a_agent_metrics", ["a2a_agent_id"]) + except Exception as e: + print(f"Warning: Could not create index idx_a2a_agent_metrics_agent_id: {e}") + + if "idx_a2a_agent_metrics_timestamp" not in existing_metrics_indexes: + try: + op.create_index("idx_a2a_agent_metrics_timestamp", "a2a_agent_metrics", ["timestamp"]) + except Exception as e: + print(f"Warning: Could not create index idx_a2a_agent_metrics_timestamp: {e}") def downgrade() -> None: """Reverse the A2A agents and metrics tables.""" + # Check if tables exist before trying to drop indexes/tables + conn = op.get_bind() + inspector = sa.inspect(conn) + existing_tables = inspector.get_table_names() - # Drop indexes first - try: - op.drop_index("idx_a2a_agents_tags", "a2a_agents") - except Exception: # nosec B110 - database compatibility - pass - - op.drop_index("idx_a2a_agent_metrics_timestamp", "a2a_agent_metrics") - 
op.drop_index("idx_a2a_agent_metrics_agent_id", "a2a_agent_metrics") - op.drop_index("idx_a2a_agents_agent_type", "a2a_agents") - op.drop_index("idx_a2a_agents_enabled", "a2a_agents") - - # Drop tables - op.drop_table("server_a2a_association") - op.drop_table("a2a_agent_metrics") - op.drop_table("a2a_agents") + # Drop indexes first (if they exist) + if "a2a_agents" in existing_tables: + try: + existing_indexes = [idx["name"] for idx in inspector.get_indexes("a2a_agents")] + + for index_name in ["idx_a2a_agents_tags", "idx_a2a_agents_agent_type", "idx_a2a_agents_enabled"]: + if index_name in existing_indexes: + try: + op.drop_index(index_name, "a2a_agents") + except Exception as e: + print(f"Warning: Could not drop index {index_name}: {e}") + except Exception as e: + print(f"Warning: Could not get indexes for a2a_agents: {e}") + + if "a2a_agent_metrics" in existing_tables: + try: + existing_metrics_indexes = [idx["name"] for idx in inspector.get_indexes("a2a_agent_metrics")] + + for index_name in ["idx_a2a_agent_metrics_timestamp", "idx_a2a_agent_metrics_agent_id"]: + if index_name in existing_metrics_indexes: + try: + op.drop_index(index_name, "a2a_agent_metrics") + except Exception as e: + print(f"Warning: Could not drop index {index_name}: {e}") + except Exception as e: + print(f"Warning: Could not get indexes for a2a_agent_metrics: {e}") + + # Drop tables (if they exist) + for table_name in ["server_a2a_association", "a2a_agent_metrics", "a2a_agents"]: + if table_name in existing_tables: + try: + op.drop_table(table_name) + except Exception as e: + print(f"Warning: Could not drop table {table_name}: {e}") diff --git a/mcpgateway/alembic/versions/c9dd86c0aac9_remove_original_name_slug_and_added_.py b/mcpgateway/alembic/versions/c9dd86c0aac9_remove_original_name_slug_and_added_.py index 44332423..19e690d3 100644 --- a/mcpgateway/alembic/versions/c9dd86c0aac9_remove_original_name_slug_and_added_.py +++ b/mcpgateway/alembic/versions/c9dd86c0aac9_remove_original_name_slug_and_added_.py @@ -27,20 +27,56 @@ def upgrade() -> None: """Upgrade schema.""" - # Remove original_name_slug column - op.alter_column("tools", "original_name_slug", new_column_name="custom_name_slug") + bind = op.get_bind() + inspector = sa.inspect(bind) - # Add custom_name column - op.add_column("tools", sa.Column("custom_name", sa.String(), nullable=True)) - op.execute("UPDATE tools SET custom_name = original_name") + # Check if this is a fresh database without existing tables + if not inspector.has_table("tools"): + print("Fresh database detected. 
Skipping custom name migration.") + return + + # Only modify tables if they exist and have the columns we're trying to modify + if inspector.has_table("tools"): + columns = [col["name"] for col in inspector.get_columns("tools")] + + # Rename original_name_slug to custom_name_slug if it exists + if "original_name_slug" in columns: + try: + op.alter_column("tools", "original_name_slug", new_column_name="custom_name_slug") + except Exception as e: + print(f"Warning: Could not rename original_name_slug to custom_name_slug: {e}") + + # Add custom_name column if it doesn't exist + if "custom_name" not in columns: + try: + op.add_column("tools", sa.Column("custom_name", sa.String(), nullable=True)) + # Only try to update if original_name column exists + if "original_name" in columns: + op.execute("UPDATE tools SET custom_name = original_name") + except Exception as e: + print(f"Warning: Could not add custom_name column: {e}") # ### end Alembic commands ### def downgrade() -> None: """Downgrade schema.""" - # Remove custom_name column - op.drop_column("tools", "custom_name") + bind = op.get_bind() + inspector = sa.inspect(bind) + + if inspector.has_table("tools"): + columns = [col["name"] for col in inspector.get_columns("tools")] + + # Remove custom_name column if it exists + if "custom_name" in columns: + try: + op.drop_column("tools", "custom_name") + except Exception as e: + print(f"Warning: Could not drop custom_name column: {e}") - # Add original_name_slug column back - op.alter_column("tools", "custom_name_slug", new_column_name="original_name_slug") + # Rename custom_name_slug back to original_name_slug if it exists + if "custom_name_slug" in columns: + try: + op.alter_column("tools", "custom_name_slug", new_column_name="original_name_slug") + except Exception as e: + print(f"Warning: Could not rename custom_name_slug to original_name_slug: {e}") # ### end Alembic commands ### diff --git a/mcpgateway/alembic/versions/cc7b95fec5d9_add_tags_support_to_all_entities.py b/mcpgateway/alembic/versions/cc7b95fec5d9_add_tags_support_to_all_entities.py index 475e126d..09c30901 100644 --- a/mcpgateway/alembic/versions/cc7b95fec5d9_add_tags_support_to_all_entities.py +++ b/mcpgateway/alembic/versions/cc7b95fec5d9_add_tags_support_to_all_entities.py @@ -17,6 +17,7 @@ # Third-Party from alembic import op import sqlalchemy as sa +from sqlalchemy.dialects import postgresql as pg # revision identifiers, used by Alembic. revision: str = "cc7b95fec5d9" @@ -27,50 +28,71 @@ def upgrade() -> None: """Upgrade schema - Add tags JSON column to all entity tables.""" - # Add tags column to tools table - op.add_column("tools", sa.Column("tags", sa.JSON(), nullable=True, server_default="[]")) + bind = op.get_bind() + inspector = sa.inspect(bind) - # Add tags column to resources table - op.add_column("resources", sa.Column("tags", sa.JSON(), nullable=True, server_default="[]")) + # Check if this is a fresh database without existing tables + if not inspector.has_table("gateways"): + print("Fresh database detected. 
Skipping migration.") + return - # Add tags column to prompts table - op.add_column("prompts", sa.Column("tags", sa.JSON(), nullable=True, server_default="[]")) + # Define tables to add tags to + tables = ["tools", "resources", "prompts", "servers", "gateways"] - # Add tags column to servers table - op.add_column("servers", sa.Column("tags", sa.JSON(), nullable=True, server_default="[]")) + # Add tags column to each table if it doesn't exist + for table_name in tables: + if inspector.has_table(table_name): + columns = [col["name"] for col in inspector.get_columns(table_name)] + if "tags" not in columns: + is_postgresql = bind.dialect.name == "postgresql" + col_type = pg.JSONB() if is_postgresql else sa.JSON() + default = sa.text("'[]'::jsonb") if is_postgresql else sa.text("'[]'") + op.add_column( + table_name, + sa.Column("tags", col_type, nullable=True, server_default=default), + ) - # Add tags column to gateways table - op.add_column("gateways", sa.Column("tags", sa.JSON(), nullable=True, server_default="[]")) - - # Create indexes for PostgreSQL (GIN indexes for JSON) - # These will be ignored on SQLite but work on PostgreSQL - try: - op.create_index("idx_tools_tags", "tools", ["tags"], postgresql_using="gin") - op.create_index("idx_resources_tags", "resources", ["tags"], postgresql_using="gin") - op.create_index("idx_prompts_tags", "prompts", ["tags"], postgresql_using="gin") - op.create_index("idx_servers_tags", "servers", ["tags"], postgresql_using="gin") - op.create_index("idx_gateways_tags", "gateways", ["tags"], postgresql_using="gin") - except Exception: # nosec B110 - database compatibility - # SQLite doesn't support GIN indexes, skip silently - pass + # Create safe B-tree indexes (avoid GIN to prevent transaction abortion) + # GIN indexes can be added separately after migration completes successfully + for table_name in tables: + if inspector.has_table(table_name): + index_name = f"idx_{table_name}_tags" + try: + existing_indexes = [idx["name"] for idx in inspector.get_indexes(table_name)] + if index_name not in existing_indexes: + # Create simple B-tree index that works on both PostgreSQL and SQLite + # This avoids PostgreSQL GIN operator class errors that abort transactions + op.create_index(index_name, table_name, ["tags"]) + print(f"Created B-tree index {index_name} on {table_name}.tags") + except Exception as e: + print(f"Warning: Could not create index {index_name}: {e}") def downgrade() -> None: """Downgrade schema - Remove tags columns from all entity tables.""" + bind = op.get_bind() + inspector = sa.inspect(bind) + + # Define tables to remove tags from + tables = ["tools", "resources", "prompts", "servers", "gateways"] + # Drop indexes first (if they exist) - try: - op.drop_index("idx_tools_tags", "tools") - op.drop_index("idx_resources_tags", "resources") - op.drop_index("idx_prompts_tags", "prompts") - op.drop_index("idx_servers_tags", "servers") - op.drop_index("idx_gateways_tags", "gateways") - except Exception: # nosec B110 - database compatibility - # Indexes might not exist on SQLite - pass - - # Drop tags columns - op.drop_column("gateways", "tags") - op.drop_column("servers", "tags") - op.drop_column("prompts", "tags") - op.drop_column("resources", "tags") - op.drop_column("tools", "tags") + for table_name in tables: + if inspector.has_table(table_name): + index_name = f"idx_{table_name}_tags" + try: + existing_indexes = [idx["name"] for idx in inspector.get_indexes(table_name)] + if index_name in existing_indexes: + op.drop_index(index_name, 
table_name=table_name) + except Exception as e: + print(f"Warning: Could not drop index {index_name}: {e}") + + # Drop tags columns (if they exist) + for table_name in reversed(tables): # Reverse order for safety + if inspector.has_table(table_name): + columns = [col["name"] for col in inspector.get_columns(table_name)] + if "tags" in columns: + try: + op.drop_column(table_name, "tags") + except Exception as e: + print(f"Warning: Could not drop column tags from {table_name}: {e}") diff --git a/mcpgateway/alembic/versions/e4fc04d1a442_add_annotations_to_tables.py b/mcpgateway/alembic/versions/e4fc04d1a442_add_annotations_to_tables.py index 2aa33c7d..f3dfaf44 100644 --- a/mcpgateway/alembic/versions/e4fc04d1a442_add_annotations_to_tables.py +++ b/mcpgateway/alembic/versions/e4fc04d1a442_add_annotations_to_tables.py @@ -40,7 +40,13 @@ def upgrade() -> None: print("Fresh database detected. Skipping migration.") return - op.add_column("tools", sa.Column("annotations", sa.JSON(), server_default=sa.text("'{}'"), nullable=False)) + if inspector.has_table("tools"): + columns = [col["name"] for col in inspector.get_columns("tools")] + if "annotations" not in columns: + try: + op.add_column("tools", sa.Column("annotations", sa.JSON(), server_default=sa.text("'{}'"), nullable=False)) + except Exception as e: + print(f"Warning: Could not add annotations column to tools: {e}") def downgrade() -> None: @@ -57,4 +63,10 @@ def downgrade() -> None: print("Fresh database detected. Skipping migration.") return - op.drop_column("tools", "annotations") + if inspector.has_table("tools"): + columns = [col["name"] for col in inspector.get_columns("tools")] + if "annotations" in columns: + try: + op.drop_column("tools", "annotations") + except Exception as e: + print(f"Warning: Could not drop annotations column from tools: {e}") diff --git a/mcpgateway/alembic/versions/e75490e949b1_add_improved_status_to_tables.py b/mcpgateway/alembic/versions/e75490e949b1_add_improved_status_to_tables.py index dc4c4e1c..44849abf 100644 --- a/mcpgateway/alembic/versions/e75490e949b1_add_improved_status_to_tables.py +++ b/mcpgateway/alembic/versions/e75490e949b1_add_improved_status_to_tables.py @@ -30,11 +30,42 @@ def upgrade(): Renames 'is_active' to 'enabled' and adds a new 'reachable' column (default True) in both 'tools' and 'gateways' tables. """ - op.alter_column("tools", "is_active", new_column_name="enabled") - op.add_column("tools", sa.Column("reachable", sa.Boolean(), nullable=False, server_default=sa.true())) + bind = op.get_bind() + inspector = sa.inspect(bind) - op.alter_column("gateways", "is_active", new_column_name="enabled") - op.add_column("gateways", sa.Column("reachable", sa.Boolean(), nullable=False, server_default=sa.true())) + # Check if this is a fresh database without existing tables + if not inspector.has_table("tools") and not inspector.has_table("gateways"): + print("Fresh database detected. 
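The tags migration above deliberately creates plain B-tree indexes and notes that GIN indexes can be added separately once it has completed. A hedged sketch of what such a follow-up revision's upgrade() could look like, assuming the tags column is JSONB on PostgreSQL (as the revised migration creates it); the index name suffix is illustrative:

    # Third-Party
    from alembic import op
    import sqlalchemy as sa


    def upgrade() -> None:
        """Add PostgreSQL-only GIN indexes on the JSON tags columns."""
        bind = op.get_bind()
        if bind.dialect.name != "postgresql":
            # SQLite keeps the plain B-tree index created by cc7b95fec5d9
            return

        inspector = sa.inspect(bind)
        for table_name in ["tools", "resources", "prompts", "servers", "gateways"]:
            if not inspector.has_table(table_name):
                continue
            index_name = f"idx_{table_name}_tags_gin"  # illustrative name
            existing = {idx["name"] for idx in inspector.get_indexes(table_name)}
            if index_name not in existing:
                op.create_index(index_name, table_name, ["tags"], postgresql_using="gin")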
Skipping status migration.") + return + + # Only modify tables if they exist and have the columns we're trying to modify + if inspector.has_table("tools"): + columns = [col["name"] for col in inspector.get_columns("tools")] + if "is_active" in columns: + try: + op.alter_column("tools", "is_active", new_column_name="enabled") + except Exception as e: + print(f"Warning: Could not rename is_active to enabled in tools: {e}") + + if "reachable" not in columns: + try: + op.add_column("tools", sa.Column("reachable", sa.Boolean(), nullable=False, server_default=sa.true())) + except Exception as e: + print(f"Warning: Could not add reachable column to tools: {e}") + + if inspector.has_table("gateways"): + columns = [col["name"] for col in inspector.get_columns("gateways")] + if "is_active" in columns: + try: + op.alter_column("gateways", "is_active", new_column_name="enabled") + except Exception as e: + print(f"Warning: Could not rename is_active to enabled in gateways: {e}") + + if "reachable" not in columns: + try: + op.add_column("gateways", sa.Column("reachable", sa.Boolean(), nullable=False, server_default=sa.true())) + except Exception as e: + print(f"Warning: Could not add reachable column to gateways: {e}") def downgrade(): @@ -42,8 +73,31 @@ def downgrade(): Reverts the changes by renaming 'enabled' back to 'is_active' and dropping the 'reachable' column in both 'tools' and 'gateways' tables. """ - op.alter_column("tools", "enabled", new_column_name="is_active") - op.drop_column("tools", "reachable") + bind = op.get_bind() + inspector = sa.inspect(bind) + + if inspector.has_table("tools"): + columns = [col["name"] for col in inspector.get_columns("tools")] + if "enabled" in columns: + try: + op.alter_column("tools", "enabled", new_column_name="is_active") + except Exception as e: + print(f"Warning: Could not rename enabled to is_active in tools: {e}") + if "reachable" in columns: + try: + op.drop_column("tools", "reachable") + except Exception as e: + print(f"Warning: Could not drop reachable column from tools: {e}") - op.alter_column("gateways", "enabled", new_column_name="is_active") - op.drop_column("gateways", "reachable") + if inspector.has_table("gateways"): + columns = [col["name"] for col in inspector.get_columns("gateways")] + if "enabled" in columns: + try: + op.alter_column("gateways", "enabled", new_column_name="is_active") + except Exception as e: + print(f"Warning: Could not rename enabled to is_active in gateways: {e}") + if "reachable" in columns: + try: + op.drop_column("gateways", "reachable") + except Exception as e: + print(f"Warning: Could not drop reachable column from gateways: {e}") diff --git a/mcpgateway/alembic/versions/f8c9d3e2a1b4_add_oauth_config_to_gateways.py b/mcpgateway/alembic/versions/f8c9d3e2a1b4_add_oauth_config_to_gateways.py index 7a88af39..d39e5213 100644 --- a/mcpgateway/alembic/versions/f8c9d3e2a1b4_add_oauth_config_to_gateways.py +++ b/mcpgateway/alembic/versions/f8c9d3e2a1b4_add_oauth_config_to_gateways.py @@ -35,11 +35,19 @@ def upgrade() -> None: print("Fresh database detected. 
Skipping migration.") return - # Add oauth_config column - with op.batch_alter_table("gateways", schema=None) as batch_op: - batch_op.add_column(sa.Column("oauth_config", sa.JSON(), nullable=True, comment="OAuth 2.0 configuration including grant_type, client_id, encrypted client_secret, URLs, and scopes")) + # Check if column already exists + columns = [col["name"] for col in inspector.get_columns("gateways")] + if "oauth_config" in columns: + print("oauth_config column already exists. Skipping migration.") + return - print("Successfully added oauth_config column to gateways table.") + # Add oauth_config column + try: + with op.batch_alter_table("gateways", schema=None) as batch_op: + batch_op.add_column(sa.Column("oauth_config", sa.JSON(), nullable=True, comment="OAuth 2.0 configuration including grant_type, client_id, encrypted client_secret, URLs, and scopes")) + print("Successfully added oauth_config column to gateways table.") + except Exception as e: + print(f"Warning: Could not add oauth_config column to gateways: {e}") def downgrade() -> None: @@ -52,8 +60,16 @@ def downgrade() -> None: print("Fresh database detected. Skipping migration.") return - # Remove oauth_config column - with op.batch_alter_table("gateways", schema=None) as batch_op: - batch_op.drop_column("oauth_config") + # Check if column exists before trying to drop it + columns = [col["name"] for col in inspector.get_columns("gateways")] + if "oauth_config" not in columns: + print("oauth_config column doesn't exist. Skipping migration.") + return - print("Successfully removed oauth_config column from gateways table.") + # Remove oauth_config column + try: + with op.batch_alter_table("gateways", schema=None) as batch_op: + batch_op.drop_column("oauth_config") + print("Successfully removed oauth_config column from gateways table.") + except Exception as e: + print(f"Warning: Could not drop oauth_config column from gateways: {e}")