
cognee-mcp

211ab850ef3d_add_sync_operations_table.py
"""Add sync_operations table Revision ID: 211ab850ef3d Revises: 9e7a3cb85175 Create Date: 2025-09-10 20:11:13.534829 """ from typing import Sequence, Union from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision: str = "211ab850ef3d" down_revision: Union[str, None] = "45957f0a9849" branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### # Check if table already exists (it might be created by Base.metadata.create_all() in initial migration) connection = op.get_bind() inspector = sa.inspect(connection) if "sync_operations" not in inspector.get_table_names(): # Table doesn't exist, create it normally op.create_table( "sync_operations", sa.Column("id", sa.UUID(), nullable=False), sa.Column("run_id", sa.Text(), nullable=True), sa.Column( "status", sa.Enum( "STARTED", "IN_PROGRESS", "COMPLETED", "FAILED", "CANCELLED", name="syncstatus", create_type=False, ), nullable=True, ), sa.Column("progress_percentage", sa.Integer(), nullable=True), sa.Column("dataset_ids", sa.JSON(), nullable=True), sa.Column("dataset_names", sa.JSON(), nullable=True), sa.Column("user_id", sa.UUID(), nullable=True), sa.Column("created_at", sa.DateTime(timezone=True), nullable=True), sa.Column("started_at", sa.DateTime(timezone=True), nullable=True), sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True), sa.Column("total_records_to_sync", sa.Integer(), nullable=True), sa.Column("total_records_to_download", sa.Integer(), nullable=True), sa.Column("total_records_to_upload", sa.Integer(), nullable=True), sa.Column("records_downloaded", sa.Integer(), nullable=True), sa.Column("records_uploaded", sa.Integer(), nullable=True), sa.Column("bytes_downloaded", sa.Integer(), nullable=True), sa.Column("bytes_uploaded", sa.Integer(), nullable=True), sa.Column("dataset_sync_hashes", sa.JSON(), nullable=True), sa.Column("error_message", sa.Text(), nullable=True), sa.Column("retry_count", sa.Integer(), nullable=True), sa.PrimaryKeyConstraint("id"), ) op.create_index( op.f("ix_sync_operations_run_id"), "sync_operations", ["run_id"], unique=True ) op.create_index( op.f("ix_sync_operations_user_id"), "sync_operations", ["user_id"], unique=False ) else: # Table already exists, but we might need to add missing columns or indexes # For now, just log that the table already exists print("sync_operations table already exists, skipping creation") # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### # Only drop if table exists (might have been created by Base.metadata.create_all()) connection = op.get_bind() inspector = sa.inspect(connection) if "sync_operations" in inspector.get_table_names(): op.drop_index(op.f("ix_sync_operations_user_id"), table_name="sync_operations") op.drop_index(op.f("ix_sync_operations_run_id"), table_name="sync_operations") op.drop_table("sync_operations") # Drop the enum type that was created (only if no other tables are using it) sa.Enum(name="syncstatus").drop(op.get_bind(), checkfirst=True) else: print("sync_operations table doesn't exist, skipping downgrade") # ### end Alembic commands ###

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/topoteretes/cognee'
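If you would rather query the endpoint from Python than curl, a minimal standard-library sketch is shown below. It assumes the endpoint returns JSON and needs no authentication, which you should verify against the API documentation.

# Minimal sketch: fetch the cognee server entry from the Glama MCP directory API.
# Assumes a JSON response and no authentication (verify against the API docs).
import json
import urllib.request

url = "https://glama.ai/api/mcp/v1/servers/topoteretes/cognee"
with urllib.request.urlopen(url) as response:
    server_info = json.load(response)

print(server_info)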

If you have feedback or need assistance with the MCP directory API, please join our Discord server.