37 files changed, 827 insertions, 693 deletions
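The bulk of this change replaces Gino with SQLAlchemy 2.0's asyncio extension on top of the asyncpg driver. A condensed sketch of the new asynchronous Alembic `env.py`, trimmed from the hunks below (the offline-mode path and logging setup are omitted; the real file points `target_metadata` at `Base.metadata`):

```python
import asyncio

from sqlalchemy import Connection, pool
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

config = context.config   # Alembic Config object for the .ini file in use
target_metadata = None    # the real env.py points this at Base.metadata


def do_run_migrations(connection: Connection) -> None:
    """Run the migrations against a sync-style Connection facade."""
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Build an AsyncEngine from the alembic.ini section and bridge into the sync runner."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        # run_sync hands a synchronous Connection to do_run_migrations on the async engine.
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


def run_migrations_online() -> None:
    """Entry point Alembic calls when not in offline mode."""
    asyncio.run(run_async_migrations())
```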
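The database bootstrap script likewise moves from psycopg2 to asyncpg: it tries to connect to the target database and only issues CREATE DATABASE (via the always-present template1 database) when the connection fails with InvalidCatalogNameError. A minimal sketch of that flow, distilled from the new create_metricity_db.py below (`ensure_database` is an illustrative name, not the function used in the script):

```python
import asyncio
from urllib.parse import urlsplit

import asyncpg

from metricity.database import build_db_uri


async def ensure_database() -> None:
    """Create the Metricity database if connecting to it raises InvalidCatalogNameError."""
    parts = urlsplit(build_db_uri())
    try:
        conn = await asyncpg.connect(
            host=parts.hostname, port=parts.port, user=parts.username,
            password=parts.password, database=parts.path[1:],
        )
        await conn.close()
    except asyncpg.InvalidCatalogNameError:
        # template1 always exists, so connect there to issue CREATE DATABASE.
        sys_conn = await asyncpg.connect(
            host=parts.hostname, port=parts.port, user=parts.username,
            password=parts.password, database="template1",
        )
        await sys_conn.execute(
            f'CREATE DATABASE "{parts.path[1:] or "metricity"}" OWNER "{parts.username}"',
        )
        await sys_conn.close()


if __name__ == "__main__":
    asyncio.run(ensure_database())
```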
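Throughout the cogs, Gino's bound `db` object and model-level helpers (`Model.get`, `.update(...).apply()`, `.create(...)`) give way to a module-level AsyncEngine plus `async_sessionmaker`, with explicit sessions and commits. A minimal sketch of the pattern, distilled from `metricity/database.py` and `member_listeners.py` in this diff (`mark_member_left` is an illustrative wrapper, not a function in the codebase):

```python
from sqlalchemy import update
from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine

from metricity import models
from metricity.config import DatabaseConfig
from metricity.database import build_db_uri

# One engine and one session factory per process replace Gino's bound `db` object.
engine: AsyncEngine = create_async_engine(build_db_uri(), echo=DatabaseConfig.log_queries)
async_session = async_sessionmaker(engine, expire_on_commit=False)


async def mark_member_left(member_id: int) -> None:
    """Flag a departed member as no longer in the guild."""
    async with async_session() as sess:
        # Gino's `User.get(...)` + `.update(...).apply()` becomes a Core UPDATE in a session.
        await sess.execute(
            update(models.User).where(models.User.id == str(member_id)).values(in_guild=False),
        )
        await sess.commit()
```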
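Gino's `User.bulk_upsert` has no direct SQLAlchemy equivalent; the guild sync below rebuilds it with PostgreSQL's `INSERT ... ON CONFLICT DO UPDATE` via `sqlalchemy.dialects.postgresql.insert`. A sketch of one chunk's upsert, distilled from `guild_listeners.py` in this diff (`upsert_user_chunk` is an illustrative name):

```python
from sqlalchemy.dialects.postgresql import insert

from metricity import models
from metricity.database import async_session

# Columns refreshed when the user row already exists.
UPDATE_COLS = [
    "name", "avatar_hash", "guild_avatar_hash", "joined_at",
    "is_staff", "bot", "in_guild", "public_flags", "pending",
]


async def upsert_user_chunk(chunk: list[dict]) -> None:
    """Insert a chunk of user rows, updating UPDATE_COLS on primary-key conflict."""
    async with async_session() as sess:
        stmt = insert(models.User).values(chunk)
        await sess.execute(stmt.on_conflict_do_update(
            index_elements=[models.User.id],
            set_={col: getattr(stmt.excluded, col) for col in UPDATE_COLS},
        ))
        await sess.commit()
```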
| diff --git a/alembic/env.py b/alembic/env.py index 7978da1..91aca63 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -1,15 +1,12 @@ +import asyncio  from logging.config import fileConfig -from sqlalchemy import engine_from_config -from sqlalchemy import pool +from sqlalchemy import Connection, pool +from sqlalchemy.ext.asyncio import async_engine_from_config  from alembic import context - -import sys -sys.path.append(".") -  from metricity.database import build_db_uri -from metricity.models import db +from metricity.models import Base  # this is the Alembic Config object, which provides  # access to the values within the .ini file in use. @@ -21,20 +18,37 @@ fileConfig(config.config_file_name)  # add your model's MetaData object here  # for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = db +target_metadata = Base.metadata  # other values from the config, defined by the needs of env.py,  # can be acquired: -# my_important_option = config.get_main_option("my_important_option")  # ... etc. -config.set_main_option('sqlalchemy.url', build_db_uri()) +config.set_main_option("sqlalchemy.url", build_db_uri()) + +def do_run_migrations(connection: Connection) -> None: +    """Run migrations.""" +    context.configure(connection=connection, target_metadata=target_metadata) + +    with context.begin_transaction(): +        context.run_migrations() + +async def run_async_migrations() -> None: +    """Run migrations asynchronously using the asyncpg driver.""" +    connectable = async_engine_from_config( +        config.get_section(config.config_ini_section), +        prefix="sqlalchemy.", +        poolclass=pool.NullPool, +    ) + +    async with connectable.connect() as connection: +        await connection.run_sync(do_run_migrations) +    await connectable.dispose() -def run_migrations_offline(): -    """Run migrations in 'offline' mode. +def run_migrations_offline() -> None: +    """ +    Run migrations in 'offline' mode.      This configures the context with just a URL      and not an Engine, though an Engine is acceptable @@ -57,26 +71,9 @@ def run_migrations_offline():          context.run_migrations() -def run_migrations_online(): -    """Run migrations in 'online' mode. - -    In this scenario we need to create an Engine -    and associate a connection with the context. 
- -    """ -    connectable = engine_from_config( -        config.get_section(config.config_ini_section), -        prefix="sqlalchemy.", -        poolclass=pool.NullPool, -    ) - -    with connectable.connect() as connection: -        context.configure( -            connection=connection, target_metadata=target_metadata -        ) - -        with context.begin_transaction(): -            context.run_migrations() +def run_migrations_online() -> None: +    """Run migrations in 'online' mode.""" +    asyncio.run(run_async_migrations())  if context.is_offline_mode(): diff --git a/alembic/script.py.mako b/alembic/script.py.mako index 2c01563..2c87742 100644 --- a/alembic/script.py.mako +++ b/alembic/script.py.mako @@ -16,9 +16,11 @@ branch_labels = ${repr(branch_labels)}  depends_on = ${repr(depends_on)} -def upgrade(): -    ${upgrades if upgrades else "pass"} +def upgrade() -> None: +    """Apply the current migration.""" +    ${upgrades} -def downgrade(): -    ${downgrades if downgrades else "pass"} +def downgrade() -> None: +    """Revert the current migration.""" +    ${downgrades} diff --git a/alembic/versions/03655ce2097b_new_not_null_constraints_on_messages_.py b/alembic/versions/03655ce2097b_new_not_null_constraints_on_messages_.py new file mode 100644 index 0000000..8ad0473 --- /dev/null +++ b/alembic/versions/03655ce2097b_new_not_null_constraints_on_messages_.py @@ -0,0 +1,65 @@ +""" +New NOT NULL constraints on messages and users table. + +Revision ID: 03655ce2097b +Revises: 563a15b2a76e +Create Date: 2023-09-04 20:17:03.543328 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "03655ce2097b" +down_revision = "563a15b2a76e" +branch_labels = None +depends_on = None + + +def upgrade() -> None: +    """Apply the current migration.""" +    # ### commands auto generated by Alembic - please adjust! ### +    op.alter_column("messages", "channel_id", +               existing_type=sa.VARCHAR(), +               nullable=False) +    op.alter_column("messages", "author_id", +               existing_type=sa.VARCHAR(), +               nullable=False) +    op.alter_column("messages", "is_deleted", +               existing_type=sa.BOOLEAN(), +               nullable=False) +    op.alter_column("users", "bot", +               existing_type=sa.BOOLEAN(), +               nullable=False) +    op.alter_column("users", "in_guild", +               existing_type=sa.BOOLEAN(), +               nullable=False) +    op.alter_column("users", "pending", +               existing_type=sa.BOOLEAN(), +               nullable=False) +    # ### end Alembic commands ### + + +def downgrade() -> None: +    """Revert the current migration.""" +    # ### commands auto generated by Alembic - please adjust! 
### +    op.alter_column("users", "pending", +               existing_type=sa.BOOLEAN(), +               nullable=True) +    op.alter_column("users", "in_guild", +               existing_type=sa.BOOLEAN(), +               nullable=True) +    op.alter_column("users", "bot", +               existing_type=sa.BOOLEAN(), +               nullable=True) +    op.alter_column("messages", "is_deleted", +               existing_type=sa.BOOLEAN(), +               nullable=True) +    op.alter_column("messages", "author_id", +               existing_type=sa.VARCHAR(), +               nullable=True) +    op.alter_column("messages", "channel_id", +               existing_type=sa.VARCHAR(), +               nullable=True) +    # ### end Alembic commands ### diff --git a/alembic/versions/25f3b8fb9961_add_pending_column_to_user.py b/alembic/versions/25f3b8fb9961_add_pending_column_to_user.py index 5b3b6f6..6a2e4c5 100644 --- a/alembic/versions/25f3b8fb9961_add_pending_column_to_user.py +++ b/alembic/versions/25f3b8fb9961_add_pending_column_to_user.py @@ -1,28 +1,31 @@ -"""add pending column to user +""" +add pending column to user.  Revision ID: 25f3b8fb9961  Revises: a259ab5efcec  Create Date: 2020-12-21 17:42:04.566930  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '25f3b8fb9961' -down_revision = 'a259ab5efcec' +revision = "25f3b8fb9961" +down_revision = "a259ab5efcec"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.add_column('users', sa.Column('pending', sa.Boolean(), nullable=True)) +    op.add_column("users", sa.Column("pending", sa.Boolean(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('users', 'pending') +    op.drop_column("users", "pending")      # ### end Alembic commands ### diff --git a/alembic/versions/2743389eb63e_add_all_tables_with_string_keys.py b/alembic/versions/2743389eb63e_add_all_tables_with_string_keys.py index 10bb0d2..9122979 100644 --- a/alembic/versions/2743389eb63e_add_all_tables_with_string_keys.py +++ b/alembic/versions/2743389eb63e_add_all_tables_with_string_keys.py @@ -1,63 +1,66 @@ -"""add all tables with string keys +""" +add all tables with string keys.  Revision ID: 2743389eb63e  Revises: 2e383ecae493  Create Date: 2020-08-25 16:35:38.833315  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '2743389eb63e' -down_revision = '2e383ecae493' +revision = "2743389eb63e" +down_revision = "2e383ecae493"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! 
### -    op.create_table('categories', -    sa.Column('id', sa.String(), nullable=False), -    sa.Column('name', sa.String(), nullable=False), -    sa.PrimaryKeyConstraint('id') +    op.create_table("categories", +    sa.Column("id", sa.String(), nullable=False), +    sa.Column("name", sa.String(), nullable=False), +    sa.PrimaryKeyConstraint("id"),      ) -    op.create_table('users', -    sa.Column('id', sa.String(), nullable=False), -    sa.Column('name', sa.String(), nullable=False), -    sa.Column('avatar_hash', sa.String(), nullable=True), -    sa.Column('joined_at', sa.DateTime(), nullable=False), -    sa.Column('created_at', sa.DateTime(), nullable=False), -    sa.Column('is_staff', sa.Boolean(), nullable=False), -    sa.Column('opt_out', sa.Boolean(), nullable=True), -    sa.Column('bot', sa.Boolean(), nullable=True), -    sa.PrimaryKeyConstraint('id') +    op.create_table("users", +    sa.Column("id", sa.String(), nullable=False), +    sa.Column("name", sa.String(), nullable=False), +    sa.Column("avatar_hash", sa.String(), nullable=True), +    sa.Column("joined_at", sa.DateTime(), nullable=False), +    sa.Column("created_at", sa.DateTime(), nullable=False), +    sa.Column("is_staff", sa.Boolean(), nullable=False), +    sa.Column("opt_out", sa.Boolean(), nullable=True), +    sa.Column("bot", sa.Boolean(), nullable=True), +    sa.PrimaryKeyConstraint("id"),      ) -    op.create_table('channels', -    sa.Column('id', sa.String(), nullable=False), -    sa.Column('name', sa.String(), nullable=False), -    sa.Column('category_id', sa.String(), nullable=True), -    sa.Column('is_staff', sa.Boolean(), nullable=False), -    sa.ForeignKeyConstraint(['category_id'], ['categories.id'], ), -    sa.PrimaryKeyConstraint('id') +    op.create_table("channels", +    sa.Column("id", sa.String(), nullable=False), +    sa.Column("name", sa.String(), nullable=False), +    sa.Column("category_id", sa.String(), nullable=True), +    sa.Column("is_staff", sa.Boolean(), nullable=False), +    sa.ForeignKeyConstraint(["category_id"], ["categories.id"] ), +    sa.PrimaryKeyConstraint("id"),      ) -    op.create_table('messages', -    sa.Column('id', sa.String(), nullable=False), -    sa.Column('channel_id', sa.String(), nullable=True), -    sa.Column('author_id', sa.String(), nullable=True), -    sa.Column('created_at', sa.DateTime(), nullable=True), -    sa.ForeignKeyConstraint(['author_id'], ['users.id'], ondelete='CASCADE'), -    sa.ForeignKeyConstraint(['channel_id'], ['channels.id'], ondelete='CASCADE'), -    sa.PrimaryKeyConstraint('id') +    op.create_table("messages", +    sa.Column("id", sa.String(), nullable=False), +    sa.Column("channel_id", sa.String(), nullable=True), +    sa.Column("author_id", sa.String(), nullable=True), +    sa.Column("created_at", sa.DateTime(), nullable=True), +    sa.ForeignKeyConstraint(["author_id"], ["users.id"], ondelete="CASCADE"), +    sa.ForeignKeyConstraint(["channel_id"], ["channels.id"], ondelete="CASCADE"), +    sa.PrimaryKeyConstraint("id"),      )      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! 
### -    op.drop_table('messages') -    op.drop_table('channels') -    op.drop_table('users') -    op.drop_table('categories') +    op.drop_table("messages") +    op.drop_table("channels") +    op.drop_table("users") +    op.drop_table("categories")      # ### end Alembic commands ### diff --git a/alembic/versions/2e383ecae493_remove_all_tables_for_conversion_to_.py b/alembic/versions/2e383ecae493_remove_all_tables_for_conversion_to_.py index 33f3673..f16a1a5 100644 --- a/alembic/versions/2e383ecae493_remove_all_tables_for_conversion_to_.py +++ b/alembic/versions/2e383ecae493_remove_all_tables_for_conversion_to_.py @@ -1,27 +1,28 @@ -"""remove all tables for conversion to string keys +""" +remove all tables for conversion to string keys.  Revision ID: 2e383ecae493  Revises: b1fdfe71fcb7  Create Date: 2020-08-25 16:31:05.025135  """ -from alembic import op -import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '2e383ecae493' -down_revision = 'b1fdfe71fcb7' +revision = "2e383ecae493" +down_revision = "b1fdfe71fcb7"  branch_labels = None  depends_on = None -def upgrade(): -    op.drop_table('messages') -    op.drop_table('users') -    op.drop_table('channels') -    op.drop_table('categories') +def upgrade() -> None: +    """Apply the current migration.""" +    op.drop_table("messages") +    op.drop_table("users") +    op.drop_table("channels") +    op.drop_table("categories") -def downgrade(): -    pass +def downgrade() -> None: +    """Revert the current migration.""" diff --git a/alembic/versions/2faa292e5818_add_indexes_on_message_table.py b/alembic/versions/2faa292e5818_add_indexes_on_message_table.py index 0cb7400..04f424c 100644 --- a/alembic/versions/2faa292e5818_add_indexes_on_message_table.py +++ b/alembic/versions/2faa292e5818_add_indexes_on_message_table.py @@ -1,30 +1,32 @@ -"""Add indexes on message table +""" +Add indexes on message table.  Revision ID: 2faa292e5818  Revises: d052a6b677e6  Create Date: 2020-10-27 02:52:23.791738  """ -from alembic import op -import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '2faa292e5818' -down_revision = 'd052a6b677e6' +revision = "2faa292e5818" +down_revision = "d052a6b677e6"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.create_index(op.f('ix_messages_author_id'), 'messages', ['author_id'], unique=False) -    op.create_index(op.f('ix_messages_channel_id'), 'messages', ['channel_id'], unique=False) +    op.create_index(op.f("ix_messages_author_id"), "messages", ["author_id"], unique=False) +    op.create_index(op.f("ix_messages_channel_id"), "messages", ["channel_id"], unique=False)      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! 
### -    op.drop_index(op.f('ix_messages_channel_id'), table_name='messages') -    op.drop_index(op.f('ix_messages_author_id'), table_name='messages') +    op.drop_index(op.f("ix_messages_channel_id"), table_name="messages") +    op.drop_index(op.f("ix_messages_author_id"), table_name="messages")      # ### end Alembic commands ### diff --git a/alembic/versions/38085c8f1099_enable_cascade_deletion_for_messages.py b/alembic/versions/38085c8f1099_enable_cascade_deletion_for_messages.py index 522d96a..db404bb 100644 --- a/alembic/versions/38085c8f1099_enable_cascade_deletion_for_messages.py +++ b/alembic/versions/38085c8f1099_enable_cascade_deletion_for_messages.py @@ -1,34 +1,36 @@ -"""enable cascade deletion for messages +""" +enable cascade deletion for messages.  Revision ID: 38085c8f1099  Revises: abc065ff2fb3  Create Date: 2020-08-25 13:00:21.629836  """ -from alembic import op -import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '38085c8f1099' -down_revision = 'abc065ff2fb3' +revision = "38085c8f1099" +down_revision = "abc065ff2fb3"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_constraint('messages_author_id_fkey', 'messages', type_='foreignkey') -    op.drop_constraint('messages_channel_id_fkey', 'messages', type_='foreignkey') -    op.create_foreign_key(None, 'messages', 'users', ['author_id'], ['id'], ondelete='CASCADE') -    op.create_foreign_key(None, 'messages', 'channels', ['channel_id'], ['id'], ondelete='CASCADE') +    op.drop_constraint("messages_author_id_fkey", "messages", type_="foreignkey") +    op.drop_constraint("messages_channel_id_fkey", "messages", type_="foreignkey") +    op.create_foreign_key(None, "messages", "users", ["author_id"], ["id"], ondelete="CASCADE") +    op.create_foreign_key(None, "messages", "channels", ["channel_id"], ["id"], ondelete="CASCADE")      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_constraint(None, 'messages', type_='foreignkey') -    op.drop_constraint(None, 'messages', type_='foreignkey') -    op.create_foreign_key('messages_channel_id_fkey', 'messages', 'channels', ['channel_id'], ['id']) -    op.create_foreign_key('messages_author_id_fkey', 'messages', 'users', ['author_id'], ['id']) +    op.drop_constraint(None, "messages", type_="foreignkey") +    op.drop_constraint(None, "messages", type_="foreignkey") +    op.create_foreign_key("messages_channel_id_fkey", "messages", "channels", ["channel_id"], ["id"]) +    op.create_foreign_key("messages_author_id_fkey", "messages", "users", ["author_id"], ["id"])      # ### end Alembic commands ### diff --git a/alembic/versions/408aac572bff_cascade_delete_channels_on_category_.py b/alembic/versions/408aac572bff_cascade_delete_channels_on_category_.py index 4afbba7..e962309 100644 --- a/alembic/versions/408aac572bff_cascade_delete_channels_on_category_.py +++ b/alembic/versions/408aac572bff_cascade_delete_channels_on_category_.py @@ -1,30 +1,32 @@ -"""cascade delete channels on category deletion +""" +cascade delete channels on category deletion.  
Revision ID: 408aac572bff  Revises: 25f3b8fb9961  Create Date: 2021-03-04 01:11:32.519071  """ -from alembic import op -import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '408aac572bff' -down_revision = '25f3b8fb9961' +revision = "408aac572bff" +down_revision = "25f3b8fb9961"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_constraint('channels_category_id_fkey', 'channels', type_='foreignkey') -    op.create_foreign_key(None, 'channels', 'categories', ['category_id'], ['id'], ondelete='CASCADE') +    op.drop_constraint("channels_category_id_fkey", "channels", type_="foreignkey") +    op.create_foreign_key(None, "channels", "categories", ["category_id"], ["id"], ondelete="CASCADE")      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_constraint(None, 'channels', type_='foreignkey') -    op.create_foreign_key('channels_category_id_fkey', 'channels', 'categories', ['category_id'], ['id']) +    op.drop_constraint(None, "channels", type_="foreignkey") +    op.create_foreign_key("channels_category_id_fkey", "channels", "categories", ["category_id"], ["id"])      # ### end Alembic commands ### diff --git a/alembic/versions/451f61f7f7cb_add_is_verified_column.py b/alembic/versions/451f61f7f7cb_add_is_verified_column.py index 2c03698..bed4a0c 100644 --- a/alembic/versions/451f61f7f7cb_add_is_verified_column.py +++ b/alembic/versions/451f61f7f7cb_add_is_verified_column.py @@ -1,28 +1,31 @@ -"""Add is_verified column +""" +Add is_verified column.  Revision ID: 451f61f7f7cb  Revises: 5683123ff89a  Create Date: 2020-08-29 17:19:32.029529  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '451f61f7f7cb' -down_revision = '5683123ff89a' +revision = "451f61f7f7cb" +down_revision = "5683123ff89a"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.add_column('users', sa.Column('is_verified', sa.Boolean(), nullable=True)) +    op.add_column("users", sa.Column("is_verified", sa.Boolean(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('users', 'is_verified') +    op.drop_column("users", "is_verified")      # ### end Alembic commands ### diff --git a/alembic/versions/45973dacf7da_add_public_flags_column.py b/alembic/versions/45973dacf7da_add_public_flags_column.py index 06c7338..e5fa5a4 100644 --- a/alembic/versions/45973dacf7da_add_public_flags_column.py +++ b/alembic/versions/45973dacf7da_add_public_flags_column.py @@ -1,28 +1,31 @@ -"""Add public flags column +""" +Add public flags column.  Revision ID: 45973dacf7da  Revises: 451f61f7f7cb  Create Date: 2020-09-02 10:38:18.142271  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. 
-revision = '45973dacf7da' -down_revision = '451f61f7f7cb' +revision = "45973dacf7da" +down_revision = "451f61f7f7cb"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.add_column('users', sa.Column('public_flags', sa.JSON(), nullable=True)) +    op.add_column("users", sa.Column("public_flags", sa.JSON(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('users', 'public_flags') +    op.drop_column("users", "public_flags")      # ### end Alembic commands ### diff --git a/alembic/versions/4d293b37634c_initial_migration.py b/alembic/versions/4d293b37634c_initial_migration.py index fa5e21b..e78a0b4 100644 --- a/alembic/versions/4d293b37634c_initial_migration.py +++ b/alembic/versions/4d293b37634c_initial_migration.py @@ -1,28 +1,25 @@ -"""initial migration +""" +initial migration.  Revision ID: 4d293b37634c  Revises:  Create Date: 2020-08-25 03:09:13.565389  """ -from alembic import op -import sqlalchemy as sa  # revision identifiers, used by Alembic. -revision = '4d293b37634c' +revision = "4d293b37634c"  down_revision = None  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    pass      # ### end Alembic commands ### -def downgrade(): -    # ### commands auto generated by Alembic - please adjust! ### -    pass -    # ### end Alembic commands ### +def downgrade() -> None: +    """Revert the current migration.""" diff --git a/alembic/versions/563a15b2a76e_add_support_for_threads.py b/alembic/versions/563a15b2a76e_add_support_for_threads.py index 6f21e92..e8b39a6 100644 --- a/alembic/versions/563a15b2a76e_add_support_for_threads.py +++ b/alembic/versions/563a15b2a76e_add_support_for_threads.py @@ -1,47 +1,50 @@ -"""Add support for threads +""" +Add support for threads.  Revision ID: 563a15b2a76e  Revises: d6c9452c3940  Create Date: 2021-10-31 14:46:49.926646  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '563a15b2a76e' -down_revision = 'd6c9452c3940' +revision = "563a15b2a76e" +down_revision = "d6c9452c3940"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! 
###      op.create_table( -        'threads', -        sa.Column('id', sa.String(), nullable=False), -        sa.Column('parent_channel_id', sa.String(), nullable=False), -        sa.Column('created_at', sa.DateTime(), nullable=True), -        sa.Column('name', sa.String(), nullable=False), -        sa.Column('archived', sa.Boolean(), nullable=False), -        sa.Column('auto_archive_duration', sa.Integer(), nullable=False), -        sa.Column('locked', sa.Boolean(), nullable=False), -        sa.Column('type', sa.String(), nullable=False), -        sa.ForeignKeyConstraint(['parent_channel_id'], ['channels.id'], ondelete='CASCADE'), -        sa.PrimaryKeyConstraint('id') +        "threads", +        sa.Column("id", sa.String(), nullable=False), +        sa.Column("parent_channel_id", sa.String(), nullable=False), +        sa.Column("created_at", sa.DateTime(), nullable=True), +        sa.Column("name", sa.String(), nullable=False), +        sa.Column("archived", sa.Boolean(), nullable=False), +        sa.Column("auto_archive_duration", sa.Integer(), nullable=False), +        sa.Column("locked", sa.Boolean(), nullable=False), +        sa.Column("type", sa.String(), nullable=False), +        sa.ForeignKeyConstraint(["parent_channel_id"], ["channels.id"], ondelete="CASCADE"), +        sa.PrimaryKeyConstraint("id"),      ) -    op.add_column('messages', sa.Column('thread_id', sa.String(), nullable=True)) -    op.create_index(op.f('ix_messages_thread_id'), 'messages', ['thread_id'], unique=False) -    op.create_index(op.f('ix_threads_type'), 'threads', ['type'], unique=False) -    op.create_foreign_key(None, 'messages', 'threads', ['thread_id'], ['id'], ondelete='CASCADE') +    op.add_column("messages", sa.Column("thread_id", sa.String(), nullable=True)) +    op.create_index(op.f("ix_messages_thread_id"), "messages", ["thread_id"], unique=False) +    op.create_index(op.f("ix_threads_type"), "threads", ["type"], unique=False) +    op.create_foreign_key(None, "messages", "threads", ["thread_id"], ["id"], ondelete="CASCADE")      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_constraint(None, 'messages', type_='foreignkey') -    op.drop_index(op.f('ix_messages_thread_id'), table_name='messages') -    op.drop_column('messages', 'thread_id') -    op.drop_table('threads') +    op.drop_constraint(None, "messages", type_="foreignkey") +    op.drop_index(op.f("ix_messages_thread_id"), table_name="messages") +    op.drop_column("messages", "thread_id") +    op.drop_table("threads")      # ### end Alembic commands ### diff --git a/alembic/versions/5683123ff89a_add_in_guild_column.py b/alembic/versions/5683123ff89a_add_in_guild_column.py index 12d67a1..9641b18 100644 --- a/alembic/versions/5683123ff89a_add_in_guild_column.py +++ b/alembic/versions/5683123ff89a_add_in_guild_column.py @@ -1,28 +1,31 @@ -"""add in_guild column +""" +add in_guild column.  Revision ID: 5683123ff89a  Revises: 2743389eb63e  Create Date: 2020-08-25 19:57:41.958297  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '5683123ff89a' -down_revision = '2743389eb63e' +revision = "5683123ff89a" +down_revision = "2743389eb63e"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! 
### -    op.add_column('users', sa.Column('in_guild', sa.Boolean(), nullable=True)) +    op.add_column("users", sa.Column("in_guild", sa.Boolean(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('users', 'in_guild') +    op.drop_column("users", "in_guild")      # ### end Alembic commands ### diff --git a/alembic/versions/6b52b1e7680b_add_channel_categories.py b/alembic/versions/6b52b1e7680b_add_channel_categories.py index bdbcde1..bde8a6d 100644 --- a/alembic/versions/6b52b1e7680b_add_channel_categories.py +++ b/alembic/versions/6b52b1e7680b_add_channel_categories.py @@ -1,34 +1,37 @@ -"""add channel categories +""" +add channel categories.  Revision ID: 6b52b1e7680b  Revises: d42a9cc66591  Create Date: 2020-08-25 05:10:43.945126  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '6b52b1e7680b' -down_revision = 'd42a9cc66591' +revision = "6b52b1e7680b" +down_revision = "d42a9cc66591"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.create_table('categories', -    sa.Column('id', sa.BigInteger(), nullable=False), -    sa.Column('name', sa.String(), nullable=False), -    sa.PrimaryKeyConstraint('id') +    op.create_table("categories", +    sa.Column("id", sa.BigInteger(), nullable=False), +    sa.Column("name", sa.String(), nullable=False), +    sa.PrimaryKeyConstraint("id"),      ) -    op.add_column('channels', sa.Column('category_id', sa.BigInteger(), nullable=True)) +    op.add_column("channels", sa.Column("category_id", sa.BigInteger(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('channels', 'category_id') -    op.drop_table('categories') +    op.drop_column("channels", "category_id") +    op.drop_table("categories")      # ### end Alembic commands ### diff --git a/alembic/versions/911049796159_remove_opt_out_column.py b/alembic/versions/911049796159_remove_opt_out_column.py index 6ba71ba..812c80b 100644 --- a/alembic/versions/911049796159_remove_opt_out_column.py +++ b/alembic/versions/911049796159_remove_opt_out_column.py @@ -1,28 +1,31 @@ -"""Remove opt_out column +""" +Remove opt_out column.  Revision ID: 911049796159  Revises: 408aac572bff  Create Date: 2021-04-09 05:09:28.565384  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = '911049796159' -down_revision = '408aac572bff' +revision = "911049796159" +down_revision = "408aac572bff"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('users', 'opt_out') +    op.drop_column("users", "opt_out")      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! 
### -    op.add_column('users', sa.Column('opt_out', sa.BOOLEAN(), autoincrement=False, nullable=True)) +    op.add_column("users", sa.Column("opt_out", sa.BOOLEAN(), autoincrement=False, nullable=True))      # ### end Alembic commands ### diff --git a/alembic/versions/a259ab5efcec_remove_verified_columns.py b/alembic/versions/a259ab5efcec_remove_verified_columns.py index 099e158..5b59d53 100644 --- a/alembic/versions/a259ab5efcec_remove_verified_columns.py +++ b/alembic/versions/a259ab5efcec_remove_verified_columns.py @@ -1,30 +1,34 @@ -"""remove verified columns +""" +remove verified columns.  Revision ID: a259ab5efcec  Revises: 2faa292e5818  Create Date: 2020-12-19 22:44:27.897133  """ -from alembic import op  import sqlalchemy as sa  from sqlalchemy.dialects import postgresql +from alembic import op +  # revision identifiers, used by Alembic. -revision = 'a259ab5efcec' -down_revision = '2faa292e5818' +revision = "a259ab5efcec" +down_revision = "2faa292e5818"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('users', 'is_verified') -    op.drop_column('users', 'verified_at') +    op.drop_column("users", "is_verified") +    op.drop_column("users", "verified_at")      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.add_column('users', sa.Column('verified_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True)) -    op.add_column('users', sa.Column('is_verified', sa.BOOLEAN(), autoincrement=False, nullable=True)) +    op.add_column("users", sa.Column("verified_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True)) +    op.add_column("users", sa.Column("is_verified", sa.BOOLEAN(), autoincrement=False, nullable=True))      # ### end Alembic commands ### diff --git a/alembic/versions/aa3517f1b1bd_add_verified_at_column.py b/alembic/versions/aa3517f1b1bd_add_verified_at_column.py index f7ea257..8c36e04 100644 --- a/alembic/versions/aa3517f1b1bd_add_verified_at_column.py +++ b/alembic/versions/aa3517f1b1bd_add_verified_at_column.py @@ -1,28 +1,31 @@ -"""Add verified_at column +""" +Add verified_at column.  Revision ID: aa3517f1b1bd  Revises: 45973dacf7da  Create Date: 2020-09-12 02:34:11.722267  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = 'aa3517f1b1bd' -down_revision = '45973dacf7da' +revision = "aa3517f1b1bd" +down_revision = "45973dacf7da"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.add_column('users', sa.Column('verified_at', sa.DateTime(), nullable=True)) +    op.add_column("users", sa.Column("verified_at", sa.DateTime(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! 
### -    op.drop_column('users', 'verified_at') +    op.drop_column("users", "verified_at")      # ### end Alembic commands ### diff --git a/alembic/versions/abc065ff2fb3_add_bot_column_to_user_table.py b/alembic/versions/abc065ff2fb3_add_bot_column_to_user_table.py index 9c53999..0fd0e36 100644 --- a/alembic/versions/abc065ff2fb3_add_bot_column_to_user_table.py +++ b/alembic/versions/abc065ff2fb3_add_bot_column_to_user_table.py @@ -1,28 +1,31 @@ -"""add bot column to user table +""" +add bot column to user table.  Revision ID: abc065ff2fb3  Revises: 6b52b1e7680b  Create Date: 2020-08-25 05:24:01.730596  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = 'abc065ff2fb3' -down_revision = '6b52b1e7680b' +revision = "abc065ff2fb3" +down_revision = "6b52b1e7680b"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.add_column('users', sa.Column('bot', sa.Boolean(), nullable=True)) +    op.add_column("users", sa.Column("bot", sa.Boolean(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('users', 'bot') +    op.drop_column("users", "bot")      # ### end Alembic commands ### diff --git a/alembic/versions/b1fdfe71fcb7_channel_category_foreign_key.py b/alembic/versions/b1fdfe71fcb7_channel_category_foreign_key.py index 329924b..8d7745a 100644 --- a/alembic/versions/b1fdfe71fcb7_channel_category_foreign_key.py +++ b/alembic/versions/b1fdfe71fcb7_channel_category_foreign_key.py @@ -1,28 +1,30 @@ -"""channel category foreign key +""" +channel category foreign key.  Revision ID: b1fdfe71fcb7  Revises: 38085c8f1099  Create Date: 2020-08-25 15:00:38.666504  """ -from alembic import op -import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = 'b1fdfe71fcb7' -down_revision = '38085c8f1099' +revision = "b1fdfe71fcb7" +down_revision = "38085c8f1099"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.create_foreign_key(None, 'channels', 'categories', ['category_id'], ['id']) +    op.create_foreign_key(None, "channels", "categories", ["category_id"], ["id"])      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_constraint(None, 'channels', type_='foreignkey') +    op.drop_constraint(None, "channels", type_="foreignkey")      # ### end Alembic commands ### diff --git a/alembic/versions/d052a6b677e6_add_is_deleted_to_message.py b/alembic/versions/d052a6b677e6_add_is_deleted_to_message.py index 8354a3e..3a8a952 100644 --- a/alembic/versions/d052a6b677e6_add_is_deleted_to_message.py +++ b/alembic/versions/d052a6b677e6_add_is_deleted_to_message.py @@ -1,28 +1,31 @@ -"""add is_deleted to Message +""" +add is_deleted to Message.  Revision ID: d052a6b677e6  Revises: aa3517f1b1bd  Create Date: 2020-10-18 19:20:20.862476  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. 
-revision = 'd052a6b677e6' -down_revision = 'aa3517f1b1bd' +revision = "d052a6b677e6" +down_revision = "aa3517f1b1bd"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.add_column('messages', sa.Column('is_deleted', sa.Boolean(), nullable=True)) +    op.add_column("messages", sa.Column("is_deleted", sa.Boolean(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('messages', 'is_deleted') +    op.drop_column("messages", "is_deleted")      # ### end Alembic commands ### diff --git a/alembic/versions/d42a9cc66591_add_channels_users_and_messages_table.py b/alembic/versions/d42a9cc66591_add_channels_users_and_messages_table.py index 0ed3728..b3c1a4d 100644 --- a/alembic/versions/d42a9cc66591_add_channels_users_and_messages_table.py +++ b/alembic/versions/d42a9cc66591_add_channels_users_and_messages_table.py @@ -1,54 +1,57 @@ -"""add channels, users and messages table +""" +add channels, users and messages table.  Revision ID: d42a9cc66591  Revises: 4d293b37634c  Create Date: 2020-08-25 03:10:21.282787  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = 'd42a9cc66591' -down_revision = '4d293b37634c' +revision = "d42a9cc66591" +down_revision = "4d293b37634c"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.create_table('channels', -    sa.Column('id', sa.BigInteger(), nullable=False), -    sa.Column('name', sa.String(), nullable=False), -    sa.Column('is_staff', sa.Boolean(), nullable=False), -    sa.PrimaryKeyConstraint('id') +    op.create_table("channels", +    sa.Column("id", sa.BigInteger(), nullable=False), +    sa.Column("name", sa.String(), nullable=False), +    sa.Column("is_staff", sa.Boolean(), nullable=False), +    sa.PrimaryKeyConstraint("id"),      ) -    op.create_table('users', -    sa.Column('id', sa.BigInteger(), nullable=False), -    sa.Column('name', sa.String(), nullable=False), -    sa.Column('avatar_hash', sa.String(), nullable=True), -    sa.Column('joined_at', sa.DateTime(), nullable=False), -    sa.Column('created_at', sa.DateTime(), nullable=False), -    sa.Column('is_staff', sa.Boolean(), nullable=False), -    sa.Column('opt_out', sa.Boolean(), nullable=True), -    sa.PrimaryKeyConstraint('id') +    op.create_table("users", +    sa.Column("id", sa.BigInteger(), nullable=False), +    sa.Column("name", sa.String(), nullable=False), +    sa.Column("avatar_hash", sa.String(), nullable=True), +    sa.Column("joined_at", sa.DateTime(), nullable=False), +    sa.Column("created_at", sa.DateTime(), nullable=False), +    sa.Column("is_staff", sa.Boolean(), nullable=False), +    sa.Column("opt_out", sa.Boolean(), nullable=True), +    sa.PrimaryKeyConstraint("id"),      ) -    op.create_table('messages', -    sa.Column('id', sa.BigInteger(), nullable=False), -    sa.Column('channel_id', sa.BigInteger(), nullable=True), -    sa.Column('author_id', sa.BigInteger(), nullable=True), -    sa.Column('created_at', sa.DateTime(), nullable=True), -    sa.ForeignKeyConstraint(['author_id'], ['users.id'], ), -    sa.ForeignKeyConstraint(['channel_id'], 
['channels.id'], ), -    sa.PrimaryKeyConstraint('id') +    op.create_table("messages", +    sa.Column("id", sa.BigInteger(), nullable=False), +    sa.Column("channel_id", sa.BigInteger(), nullable=True), +    sa.Column("author_id", sa.BigInteger(), nullable=True), +    sa.Column("created_at", sa.DateTime(), nullable=True), +    sa.ForeignKeyConstraint(["author_id"], ["users.id"] ), +    sa.ForeignKeyConstraint(["channel_id"], ["channels.id"] ), +    sa.PrimaryKeyConstraint("id"),      )      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_table('messages') -    op.drop_table('users') -    op.drop_table('channels') +    op.drop_table("messages") +    op.drop_table("users") +    op.drop_table("channels")      # ### end Alembic commands ### diff --git a/alembic/versions/d6c9452c3940_add_discord_py_2_0_avatar_support.py b/alembic/versions/d6c9452c3940_add_discord_py_2_0_avatar_support.py index 1fcfe9b..751aa65 100644 --- a/alembic/versions/d6c9452c3940_add_discord_py_2_0_avatar_support.py +++ b/alembic/versions/d6c9452c3940_add_discord_py_2_0_avatar_support.py @@ -1,28 +1,31 @@ -"""Add Discord.py 2.0 avatar support +""" +Add Discord.py 2.0 avatar support.  Revision ID: d6c9452c3940  Revises: 911049796159  Create Date: 2021-10-31 12:48:03.323113  """ -from alembic import op  import sqlalchemy as sa +from alembic import op  # revision identifiers, used by Alembic. -revision = 'd6c9452c3940' -down_revision = '911049796159' +revision = "d6c9452c3940" +down_revision = "911049796159"  branch_labels = None  depends_on = None -def upgrade(): +def upgrade() -> None: +    """Apply the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.add_column('users', sa.Column('guild_avatar_hash', sa.String(), nullable=True)) +    op.add_column("users", sa.Column("guild_avatar_hash", sa.String(), nullable=True))      # ### end Alembic commands ### -def downgrade(): +def downgrade() -> None: +    """Revert the current migration."""      # ### commands auto generated by Alembic - please adjust! ### -    op.drop_column('users', 'guild_avatar_hash') +    op.drop_column("users", "guild_avatar_hash")      # ### end Alembic commands ### diff --git a/config-default.toml b/config-default.toml index de8290b..9032294 100644 --- a/config-default.toml +++ b/config-default.toml @@ -33,20 +33,14 @@ staff_categories = [      787641585624940544,      412287621704843264,      881573419878076456, -    820711852652494868 +    820711852652494868,  ]  # Channels that should be flagged as staff only by channel ID -staff_channels =[ -    412375055910043655 -] +staff_channels = [412375055910043655]  # Don't report messages for the following categories -ignore_categories = [ -    714494672835444826, -    890331800025563216, -    895417395261341766 -] +ignore_categories = [714494672835444826, 890331800025563216, 895417395261341766]  [database]  # Postgres! 
@@ -62,3 +56,6 @@ port = 5432  database = "metricity"  username = "postgres"  password = "postgres" + +# Log all queries (SQLAlchemy echo) +log_queries = false diff --git a/create_metricity_db.py b/create_metricity_db.py index 215ba6e..2dc3a75 100644 --- a/create_metricity_db.py +++ b/create_metricity_db.py @@ -1,42 +1,49 @@  """Ensures the metricity db exists before running migrations.""" -import os +import asyncio  from urllib.parse import SplitResult, urlsplit -import psycopg2 -from psycopg2 import sql -from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT +import asyncpg + +from metricity.database import build_db_uri  def parse_db_url(db_url: str) -> SplitResult:      """Validate and split the given database url."""      db_url_parts = urlsplit(db_url)      if not all(( -        db_url_parts.hostname, +        db_url_parts.netloc,          db_url_parts.username,          db_url_parts.password,      )):          raise ValueError("The given db_url is not a valid PostgreSQL database URL.")      return db_url_parts +async def create_db() -> None: +    """Create the Metricity database if it does not exist.""" +    parts = parse_db_url(build_db_uri()) +    try: +        await asyncpg.connect( +            host=parts.hostname, +            port=parts.port, +            user=parts.username, +            database=parts.path[1:], +            password=parts.password, +        ) +    except asyncpg.InvalidCatalogNameError: +        print("Creating metricity database.") # noqa: T201 +        sys_conn = await asyncpg.connect( +            database="template1", +            user=parts.username, +            host=parts.hostname, +            port=parts.port, +            password=parts.password, +        ) + +        await sys_conn.execute( +            f'CREATE DATABASE "{parts.path[1:] or "metricity"}" OWNER "{parts.username}"', +        ) + +        await sys_conn.close()  if __name__ == "__main__": -    database_parts = parse_db_url(os.environ["DATABASE_URI"]) - -    conn = psycopg2.connect( -        host=database_parts.hostname, -        port=database_parts.port, -        user=database_parts.username, -        password=database_parts.password, -    ) - -    db_name = database_parts.path[1:] or "metricity" - -    # Required to create a database in a .execute() call -    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) -    with conn.cursor() as cursor: -        cursor.execute("SELECT 1 FROM pg_catalog.pg_database WHERE datname = %s", (db_name,)) -        exists = cursor.fetchone() -        if not exists: -            print("Creating metricity database.")  # noqa: T201 -            cursor.execute(sql.SQL("CREATE DATABASE {dbname}").format(dbname=sql.Identifier(db_name))) -    conn.close() +    asyncio.get_event_loop().run_until_complete(create_db()) diff --git a/docker-compose.yml b/docker-compose.yml index 00693e5..af34efe 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -38,4 +38,4 @@ services:      env_file:        - .env      environment: -      DATABASE_URI: postgres://pysite:pysite@postgres/metricity +      DATABASE_URI: postgresql+asyncpg://pysite:pysite@postgres/metricity diff --git a/metricity/__init__.py b/metricity/__init__.py index df7d211..e5d4e4d 100644 --- a/metricity/__init__.py +++ b/metricity/__init__.py @@ -22,10 +22,6 @@ coloredlogs.install(level=PythonConfig.log_level)  # Set Discord.py log level  logging.getLogger("discord.client").setLevel(PythonConfig.discord_log_level) -# Gino has an obnoxiously loud log for all queries executed, not great when 
inserting -# tens of thousands of users, so we can disable that (it's just a SQLAlchemy logger) -logging.getLogger("gino.engine._SAEngine").setLevel(logging.WARNING) -  # On Windows, the selector event loop is required for aiodns.  if os.name == "nt":      asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) diff --git a/metricity/bot.py b/metricity/bot.py index 0c0fa38..d4b9b30 100644 --- a/metricity/bot.py +++ b/metricity/bot.py @@ -6,7 +6,6 @@ from pydis_core import BotBase  from pydis_core.utils import logging  from metricity import exts -from metricity.database import connect  log = logging.get_logger(__name__) @@ -24,7 +23,6 @@ class Bot(BotBase):          """Connect to db and load cogs."""          await super().setup_hook()          log.info("Metricity is online, logged in as %s", self.user) -        await connect()          await self.load_extensions(exts)      async def on_error(self, event: str, *_args, **_kwargs) -> None: diff --git a/metricity/config.py b/metricity/config.py index 9dec9af..5565052 100644 --- a/metricity/config.py +++ b/metricity/config.py @@ -127,3 +127,5 @@ class DatabaseConfig(metaclass=ConfigSection):      database: str | None      username: str | None      password: str | None + +    log_queries: bool | None diff --git a/metricity/database.py b/metricity/database.py index 19ec1ff..347ce90 100644 --- a/metricity/database.py +++ b/metricity/database.py @@ -2,35 +2,33 @@  import logging  from datetime import UTC, datetime +from urllib.parse import urlsplit -import gino  from sqlalchemy.engine import Dialect +from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine  from sqlalchemy.types import DateTime, TypeDecorator  from metricity.config import DatabaseConfig  log = logging.getLogger(__name__) -db = gino.Gino() - -  def build_db_uri() -> str: -    """Use information from the config file to build a PostgreSQL URI.""" +    """Build the database uri from the config."""      if DatabaseConfig.uri: +        parsed = urlsplit(DatabaseConfig.uri) +        if parsed.scheme != "postgresql+asyncpg": +            log.debug("The given db_url did not use the asyncpg driver. 
Updating the db_url to use asyncpg.") +            return parsed._replace(scheme="postgresql+asyncpg").geturl() +          return DatabaseConfig.uri      return ( -        f"postgresql://{DatabaseConfig.username}:{DatabaseConfig.password}" +        f"postgresql+asyncpg://{DatabaseConfig.username}:{DatabaseConfig.password}"          f"@{DatabaseConfig.host}:{DatabaseConfig.port}/{DatabaseConfig.database}"      ) - -async def connect() -> None: -    """Initiate a connection to the database.""" -    log.info("Initiating connection to the database") -    await db.set_bind(build_db_uri()) -    log.info("Database connection established") - +engine: AsyncEngine = create_async_engine(build_db_uri(), echo=DatabaseConfig.log_queries) +async_session = async_sessionmaker(engine, expire_on_commit=False)  class TZDateTime(TypeDecorator):      """ diff --git a/metricity/exts/event_listeners/_utils.py b/metricity/exts/event_listeners/_utils.py index 6b2aacf..4006ea2 100644 --- a/metricity/exts/event_listeners/_utils.py +++ b/metricity/exts/event_listeners/_utils.py @@ -1,11 +1,12 @@  import discord +from sqlalchemy.ext.asyncio import AsyncSession  from metricity import models -async def insert_thread(thread: discord.Thread) -> None: -    """Insert the given thread to the database.""" -    await models.Thread.create( +def insert_thread(thread: discord.Thread, sess: AsyncSession) -> None: +    """Insert the given thread to the database session.""" +    sess.add(models.Thread(          id=str(thread.id),          parent_channel_id=str(thread.parent_id),          name=thread.name, @@ -13,12 +14,13 @@ async def insert_thread(thread: discord.Thread) -> None:          auto_archive_duration=thread.auto_archive_duration,          locked=thread.locked,          type=thread.type.name, -    ) +        created_at=thread.created_at, +    )) -async def sync_message(message: discord.Message, *, from_thread: bool) -> None: +async def sync_message(message: discord.Message, sess: AsyncSession, *, from_thread: bool) -> None:      """Sync the given message with the database.""" -    if await models.Message.get(str(message.id)): +    if await sess.get(models.Message, str(message.id)):          return      args = { @@ -33,4 +35,4 @@ async def sync_message(message: discord.Message, *, from_thread: bool) -> None:          args["channel_id"] = str(thread.parent_id)          args["thread_id"] = str(thread.id) -    await models.Message.create(**args) +    sess.add(models.Message(**args)) diff --git a/metricity/exts/event_listeners/guild_listeners.py b/metricity/exts/event_listeners/guild_listeners.py index c7c074f..9ad0bda 100644 --- a/metricity/exts/event_listeners/guild_listeners.py +++ b/metricity/exts/event_listeners/guild_listeners.py @@ -3,16 +3,17 @@  import discord  from discord.ext import commands  from pydis_core.utils import logging, scheduling +from sqlalchemy import update +from sqlalchemy.dialects.postgresql import insert  from metricity import models  from metricity.bot import Bot  from metricity.config import BotConfig -from metricity.database import db +from metricity.database import async_session  from metricity.exts.event_listeners import _utils  log = logging.get_logger(__name__) -  class GuildListeners(commands.Cog):      """Listen for guild (and guild channel) events and sync them to the database.""" @@ -31,7 +32,9 @@ class GuildListeners(commands.Cog):          await self.sync_thread_archive_state(guild)          log.info("Beginning user synchronisation process") -        await 
models.User.update.values(in_guild=False).gino.status() +        async with async_session() as sess: +            await sess.execute(update(models.User).values(in_guild=False)) +            await sess.commit()          users = [              { @@ -54,9 +57,29 @@ class GuildListeners(commands.Cog):          user_chunks = discord.utils.as_chunks(users, 500) -        for chunk in user_chunks: -            log.info("Upserting chunk of %d", len(chunk)) -            await models.User.bulk_upsert(chunk) +        async with async_session() as sess: +            for chunk in user_chunks: +                log.info("Upserting chunk of %d", len(chunk)) +                qs = insert(models.User).values(chunk) + +                update_cols = [ +                    "name", +                    "avatar_hash", +                    "guild_avatar_hash", +                    "joined_at", +                    "is_staff", +                    "bot", +                    "in_guild", +                    "public_flags", +                    "pending", +                ] + +                await sess.execute(qs.on_conflict_do_update( +                    index_elements=[models.User.id], +                    set_={k: getattr(qs.excluded, k) for k in update_cols}, +                )) + +            await sess.commit()          log.info("User upsert complete") @@ -66,9 +89,19 @@ class GuildListeners(commands.Cog):      async def sync_thread_archive_state(guild: discord.Guild) -> None:          """Sync the archive state of all threads in the database with the state in guild."""          active_thread_ids = [str(thread.id) for thread in guild.threads] -        async with db.transaction() as tx: -            async for db_thread in tx.connection.iterate(models.Thread.query): -                await db_thread.update(archived=db_thread.id not in active_thread_ids).apply() + +        async with async_session() as sess: +            await sess.execute( +                update(models.Thread) +                .where(models.Thread.id.in_(active_thread_ids)) +                .values(archived=False), +            ) +            await sess.execute( +                update(models.Thread) +                .where(~models.Thread.id.in_(active_thread_ids)) +                .values(archived=True), +            ) +            await sess.commit()      async def sync_channels(self, guild: discord.Guild) -> None:          """Sync channels and categories with the database.""" @@ -76,59 +109,61 @@ class GuildListeners(commands.Cog):          log.info("Beginning category synchronisation process") -        for channel in guild.channels: -            if isinstance(channel, discord.CategoryChannel): -                if db_cat := await models.Category.get(str(channel.id)): -                    await db_cat.update(name=channel.name).apply() -                else: -                    await models.Category.create(id=str(channel.id), name=channel.name) +        async with async_session() as sess: +            for channel in guild.channels: +                if isinstance(channel, discord.CategoryChannel): +                    if existing_cat := await sess.get(models.Category, str(channel.id)): +                        existing_cat.name = channel.name +                    else: +                        sess.add(models.Category(id=str(channel.id), name=channel.name)) + +            await sess.commit()          log.info("Category synchronisation process complete, synchronising channels") -        for channel in guild.channels: -            if 
channel.category and channel.category.id in BotConfig.ignore_categories: -                continue - -            if not isinstance(channel, discord.CategoryChannel): -                category_id = str(channel.category.id) if channel.category else None -                # Cast to bool so is_staff is False if channel.category is None -                is_staff = channel.id in BotConfig.staff_channels or bool( -                    channel.category and channel.category.id in BotConfig.staff_categories, -                ) -                if db_chan := await models.Channel.get(str(channel.id)): -                    await db_chan.update( -                        name=channel.name, -                        category_id=category_id, -                        is_staff=is_staff, -                    ).apply() -                else: -                    await models.Channel.create( -                        id=str(channel.id), -                        name=channel.name, -                        category_id=category_id, -                        is_staff=is_staff, +        async with async_session() as sess: +            for channel in guild.channels: +                if channel.category and channel.category.id in BotConfig.ignore_categories: +                    continue + +                if not isinstance(channel, discord.CategoryChannel): +                    category_id = str(channel.category.id) if channel.category else None +                    # Cast to bool so is_staff is False if channel.category is None +                    is_staff = channel.id in BotConfig.staff_channels or bool( +                        channel.category and channel.category.id in BotConfig.staff_categories,                      ) +                    if db_chan := await sess.get(models.Channel, str(channel.id)): +                        db_chan.name = channel.name +                        db_chan.category_id = category_id +                        db_chan.is_staff = is_staff +                    else: +                        sess.add(models.Channel( +                            id=str(channel.id), +                            name=channel.name, +                            category_id=category_id, +                            is_staff=is_staff, +                        )) + +            await sess.commit()          log.info("Channel synchronisation process complete, synchronising threads") -        for thread in guild.threads: -            if thread.parent and thread.parent.category: -                if thread.parent.category.id in BotConfig.ignore_categories: +        async with async_session() as sess: +            for thread in guild.threads: +                if thread.parent and thread.parent.category: +                    if thread.parent.category.id in BotConfig.ignore_categories: +                        continue +                else: +                    # This is a forum channel, not currently supported by Discord.py. Ignore it.                      continue -            else: -                # This is a forum channel, not currently supported by Discord.py. Ignore it. 
-                continue - -            if db_thread := await models.Thread.get(str(thread.id)): -                await db_thread.update( -                    name=thread.name, -                    archived=thread.archived, -                    auto_archive_duration=thread.auto_archive_duration, -                    locked=thread.locked, -                    type=thread.type.name, -                ).apply() -            else: -                await _utils.insert_thread(thread) + +                if db_thread := await sess.get(models.Thread, str(thread.id)): +                    db_thread.name = thread.name +                    db_thread.archived = thread.archived +                    db_thread.auto_archive_duration = thread.auto_archive_duration +                    db_thread.locked = thread.locked +                    db_thread.type = thread.type.name +                else: +                    _utils.insert_thread(thread, sess) +            await sess.commit()          log.info("Thread synchronisation process complete, finished synchronising guild.")          self.bot.channel_sync_in_progress.set() diff --git a/metricity/exts/event_listeners/member_listeners.py b/metricity/exts/event_listeners/member_listeners.py index ddf5954..dc1e3c1 100644 --- a/metricity/exts/event_listeners/member_listeners.py +++ b/metricity/exts/event_listeners/member_listeners.py @@ -5,9 +5,11 @@ import contextlib  import discord  from asyncpg.exceptions import UniqueViolationError  from discord.ext import commands +from sqlalchemy import update  from metricity.bot import Bot  from metricity.config import BotConfig +from metricity.database import async_session  from metricity.models import User @@ -25,10 +27,11 @@ class MemberListeners(commands.Cog):          if member.guild.id != BotConfig.guild_id:              return -        if db_user := await User.get(str(member.id)): -            await db_user.update( -                in_guild=False, -            ).apply() +        async with async_session() as sess: +            await sess.execute( +                update(User).where(User.id == str(member.id)).values(in_guild=False), +            ) +            await sess.commit()      @commands.Cog.listener()      async def on_member_join(self, member: discord.Member) -> None: @@ -38,22 +41,9 @@ class MemberListeners(commands.Cog):          if member.guild.id != BotConfig.guild_id:              return -        if db_user := await User.get(str(member.id)): -            await db_user.update( -                id=str(member.id), -                name=member.name, -                avatar_hash=getattr(member.avatar, "key", None), -                guild_avatar_hash=getattr(member.guild_avatar, "key", None), -                joined_at=member.joined_at, -                created_at=member.created_at, -                is_staff=BotConfig.staff_role_id in [role.id for role in member.roles], -                public_flags=dict(member.public_flags), -                pending=member.pending, -                in_guild=True, -            ).apply() -        else: -            with contextlib.suppress(UniqueViolationError): -                await User.create( +        async with async_session() as sess: +            if await sess.get(User, str(member.id)): +                await sess.execute(update(User).where(User.id == str(member.id)).values(                      id=str(member.id),                      name=member.name,                      avatar_hash=getattr(member.avatar, "key", None), @@ -64,7 +54,23 @@ class 
MemberListeners(commands.Cog):                      public_flags=dict(member.public_flags),                      pending=member.pending,                      in_guild=True, -                ) +                )) +            else: +                with contextlib.suppress(UniqueViolationError): +                    sess.add(User( +                        id=str(member.id), +                        name=member.name, +                        avatar_hash=getattr(member.avatar, "key", None), +                        guild_avatar_hash=getattr(member.guild_avatar, "key", None), +                        joined_at=member.joined_at, +                        created_at=member.created_at, +                        is_staff=BotConfig.staff_role_id in [role.id for role in member.roles], +                        public_flags=dict(member.public_flags), +                        pending=member.pending, +                        in_guild=True, +                    )) + +            await sess.commit()      @commands.Cog.listener()      async def on_member_update(self, _before: discord.Member, member: discord.Member) -> None: @@ -80,41 +86,43 @@ class MemberListeners(commands.Cog):          roles = {role.id for role in member.roles} -        if db_user := await User.get(str(member.id)): -            if ( -                db_user.name != member.name or -                db_user.avatar_hash != getattr(member.avatar, "key", None) or -                db_user.guild_avatar_hash != getattr(member.guild_avatar, "key", None) or -                BotConfig.staff_role_id in -                [role.id for role in member.roles] != db_user.is_staff -                or db_user.pending is not member.pending -            ): -                await db_user.update( -                    id=str(member.id), -                    name=member.name, -                    avatar_hash=getattr(member.avatar, "key", None), -                    guild_avatar_hash=getattr(member.guild_avatar, "key", None), -                    joined_at=member.joined_at, -                    created_at=member.created_at, -                    is_staff=BotConfig.staff_role_id in roles, -                    public_flags=dict(member.public_flags), -                    in_guild=True, -                    pending=member.pending, -                ).apply() -        else: -            with contextlib.suppress(UniqueViolationError): -                await User.create( -                    id=str(member.id), -                    name=member.name, -                    avatar_hash=getattr(member.avatar, "key", None), -                    guild_avatar_hash=getattr(member.guild_avatar, "key", None), -                    joined_at=member.joined_at, -                    created_at=member.created_at, -                    is_staff=BotConfig.staff_role_id in roles, -                    public_flags=dict(member.public_flags), -                    in_guild=True, -                    pending=member.pending, -                ) +        async with async_session() as sess: +            if db_user := await sess.get(User, str(member.id)): +                if ( +                    db_user.name != member.name or +                    db_user.avatar_hash != getattr(member.avatar, "key", None) or +                    db_user.guild_avatar_hash != getattr(member.guild_avatar, "key", None) or +                    (BotConfig.staff_role_id in roles) != db_user.is_staff +                    or db_user.pending is not member.pending +                ): +                    await 
sess.execute(update(User).where(User.id == str(member.id)).values( +                        id=str(member.id), +                        name=member.name, +                        avatar_hash=getattr(member.avatar, "key", None), +                        guild_avatar_hash=getattr(member.guild_avatar, "key", None), +                        joined_at=member.joined_at, +                        created_at=member.created_at, +                        is_staff=BotConfig.staff_role_id in roles, +                        public_flags=dict(member.public_flags), +                        in_guild=True, +                        pending=member.pending, +                    )) +            else: +                with contextlib.suppress(UniqueViolationError): +                    sess.add(User( +                        id=str(member.id), +                        name=member.name, +                        avatar_hash=getattr(member.avatar, "key", None), +                        guild_avatar_hash=getattr(member.guild_avatar, "key", None), +                        joined_at=member.joined_at, +                        created_at=member.created_at, +                        is_staff=BotConfig.staff_role_id in roles, +                        public_flags=dict(member.public_flags), +                        in_guild=True, +                        pending=member.pending, +                    )) + +            await sess.commit() diff --git a/metricity/exts/event_listeners/message_listeners.py b/metricity/exts/event_listeners/message_listeners.py index b446e26..28329d0 100644 --- a/metricity/exts/event_listeners/message_listeners.py +++ b/metricity/exts/event_listeners/message_listeners.py @@ -2,9 +2,11 @@  import discord  from discord.ext import commands +from sqlalchemy import update  from metricity.bot import Bot  from metricity.config import BotConfig +from metricity.database import async_session  from metricity.exts.event_listeners import _utils  from metricity.models import Message, User @@ -33,28 +35,32 @@ class MessageListeners(commands.Cog):          await self.bot.sync_process_complete.wait()          await self.bot.channel_sync_in_progress.wait() -        if not await User.get(str(message.author.id)): -            return +        async with async_session() as sess: +            if not await sess.get(User, str(message.author.id)): +                return -        cat_id = message.channel.category.id if message.channel.category else None -        if cat_id in BotConfig.ignore_categories: -            return +            cat_id = message.channel.category.id if message.channel.category else None +            if cat_id in BotConfig.ignore_categories: +                return + +            from_thread = isinstance(message.channel, discord.Thread) +            await _utils.sync_message(message, sess, from_thread=from_thread) -        from_thread = isinstance(message.channel, discord.Thread) -        await _utils.sync_message(message, from_thread=from_thread) +            await sess.commit()      @commands.Cog.listener()      async def on_raw_message_delete(self, message: discord.RawMessageDeleteEvent) -> None:          """If a message is deleted and we have a record of it set the is_deleted flag.""" -        if message := await Message.get(str(message.message_id)): -            await message.update(is_deleted=True).apply() +        async with async_session() as sess: +            await sess.execute(update(Message).where(Message.id == str(message.message_id)).values(is_deleted=True)) +            await sess.commit() 
     @commands.Cog.listener()      async def on_raw_bulk_message_delete(self, messages: discord.RawBulkMessageDeleteEvent) -> None:          """If messages are deleted in bulk and we have a record of them set the is_deleted flag.""" -        for message_id in messages.message_ids: -            if message := await Message.get(str(message_id)): -                await message.update(is_deleted=True).apply() +        async with async_session() as sess: +            await sess.execute(update(Message).where(Message.id.in_([str(message_id) for message_id in messages.message_ids])).values(is_deleted=True)) +            await sess.commit()  async def setup(bot: Bot) -> None: diff --git a/metricity/models.py b/metricity/models.py index c2ae0c7..7389e86 100644 --- a/metricity/models.py +++ b/metricity/models.py @@ -1,116 +1,78 @@  """Database models used by Metricity for statistic collection."""  from datetime import UTC, datetime -from typing import Any -from sqlalchemy.dialects.postgresql import insert +from sqlalchemy import JSON, ForeignKey +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column -from metricity.database import TZDateTime, db +from metricity.database import TZDateTime -class Category(db.Model): +class Base(DeclarativeBase): +    """Base class for all database models.""" + + +class Category(Base):      """Database model representing a Discord category channel."""      __tablename__ = "categories" -    id = db.Column(db.String, primary_key=True) -    name = db.Column(db.String, nullable=False) +    id: Mapped[str] = mapped_column(primary_key=True) +    name: Mapped[str] -class Channel(db.Model): +class Channel(Base):      """Database model representing a Discord channel."""      __tablename__ = "channels" -    id = db.Column(db.String, primary_key=True) -    name = db.Column(db.String, nullable=False) -    category_id = db.Column( -        db.String, -        db.ForeignKey("categories.id", ondelete="CASCADE"), -        nullable=True, -    ) -    is_staff = db.Column(db.Boolean, nullable=False) +    id: Mapped[str] = mapped_column(primary_key=True) +    name: Mapped[str] +    category_id: Mapped[str | None] = mapped_column(ForeignKey("categories.id", ondelete="CASCADE")) +    is_staff: Mapped[bool] -class Thread(db.Model): +class Thread(Base):      """Database model representing a Thread channel."""      __tablename__ = "threads" -    id = db.Column(db.String, primary_key=True) -    parent_channel_id = db.Column( -        db.String, -        db.ForeignKey("channels.id", ondelete="CASCADE"), -        nullable=False, -    ) -    created_at = db.Column(TZDateTime(), default=datetime.now(UTC)) -    name = db.Column(db.String, nullable=False) -    archived = db.Column(db.Boolean, default=False, nullable=False) -    auto_archive_duration = db.Column(db.Integer, nullable=False) -    locked = db.Column(db.Boolean, default=False, nullable=False) -    type = db.Column(db.String, nullable=False, index=True) - - -class User(db.Model): +    id: Mapped[str] = mapped_column(primary_key=True) +    parent_channel_id: Mapped[str] = mapped_column(ForeignKey("channels.id", ondelete="CASCADE")) +    created_at = mapped_column(TZDateTime(), default=datetime.now(UTC)) +    name: Mapped[str] +    archived: Mapped[bool] +    auto_archive_duration: Mapped[int] +    locked: Mapped[bool] +    type: Mapped[str] = mapped_column(index=True) + + +class User(Base):      """Database model representing a Discord user."""      __tablename__ = "users" -    id = db.Column(db.String, primary_key=True) -    name = db.Column(db.String, 
nullable=False) -    avatar_hash = db.Column(db.String, nullable=True) -    guild_avatar_hash = db.Column(db.String, nullable=True) -    joined_at = db.Column(TZDateTime(), nullable=False) -    created_at = db.Column(TZDateTime(), nullable=False) -    is_staff = db.Column(db.Boolean, nullable=False) -    bot = db.Column(db.Boolean, default=False) -    in_guild = db.Column(db.Boolean, default=True) -    public_flags = db.Column(db.JSON, default={}) -    pending = db.Column(db.Boolean, default=False) - -    @classmethod -    def bulk_upsert(cls: type, users: list[dict[str, Any]]) -> Any:  # noqa: ANN401 -        """Perform a bulk insert/update of the database to sync the user table.""" -        qs = insert(cls.__table__).values(users) - -        update_cols = [ -            "name", -            "avatar_hash", -            "guild_avatar_hash", -            "joined_at", -            "is_staff", -            "bot", -            "in_guild", -            "public_flags", -            "pending", -        ] - -        return qs.on_conflict_do_update( -            index_elements=[cls.id], -            set_={k: getattr(qs.excluded, k) for k in update_cols}, -        ).returning(cls.__table__).gino.all() - - -class Message(db.Model): +    id: Mapped[str] = mapped_column(primary_key=True) +    name: Mapped[str] = mapped_column(nullable=False) +    avatar_hash: Mapped[str] = mapped_column(nullable=True) +    guild_avatar_hash: Mapped[str] = mapped_column(nullable=True) +    joined_at = mapped_column(TZDateTime(), nullable=False) +    created_at = mapped_column(TZDateTime(), nullable=False) +    is_staff: Mapped[bool] = mapped_column(nullable=False) +    bot: Mapped[bool] = mapped_column(default=False) +    in_guild: Mapped[bool] = mapped_column(default=False) +    public_flags = mapped_column(JSON, default={}) +    pending: Mapped[bool] = mapped_column(default=False) + + +class Message(Base):      """Database model representing a message sent in a Discord server."""      __tablename__ = "messages" -    id = db.Column(db.String, primary_key=True) -    channel_id = db.Column( -        db.String, -        db.ForeignKey("channels.id", ondelete="CASCADE"), -        index=True, -    ) -    thread_id = db.Column( -        db.String, -        db.ForeignKey("threads.id", ondelete="CASCADE"), -        index=True, -    ) -    author_id = db.Column( -        db.String, -        db.ForeignKey("users.id", ondelete="CASCADE"), -        index=True, -    ) -    created_at = db.Column(TZDateTime(), default=datetime.now(UTC)) -    is_deleted = db.Column(db.Boolean, default=False) +    id: Mapped[str] = mapped_column(primary_key=True) +    channel_id: Mapped[str] = mapped_column(ForeignKey("channels.id", ondelete="CASCADE"), index=True) +    thread_id: Mapped[str | None] = mapped_column(ForeignKey("threads.id", ondelete="CASCADE"), index=True) +    author_id: Mapped[str] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), index=True) +    created_at = mapped_column(TZDateTime()) +    is_deleted: Mapped[bool] = mapped_column(default=False) diff --git a/poetry.lock b/poetry.lock index 889c3f5..38bac99 100644 --- a/poetry.lock +++ b/poetry.lock @@ -564,26 +564,81 @@ files = [  ]  [[package]] -name = "gino" -version = "1.0.1" -description = "GINO Is Not ORM - a Python asyncio ORM on SQLAlchemy core." 
+name = "greenlet" +version = "2.0.2" +description = "Lightweight in-process concurrent programming"  optional = false -python-versions = ">=3.5,<4.0" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"  files = [ -    {file = "gino-1.0.1-py3-none-any.whl", hash = "sha256:56df57cfdefbaf897a7c4897c265a0e91a8cca80716fb64f7d3cf6d501fdfb3d"}, -    {file = "gino-1.0.1.tar.gz", hash = "sha256:fe4189e82fe9d20c4a5f03fc775fb91c168061c5176b4c95623caeef22316150"}, +    {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, +    {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, +    {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, +    {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, +    {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, +    {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, +    {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, +    {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, +    {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, +    {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, +    {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, +    {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, +    {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, +    {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, +    {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, +    {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, +    {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, +    {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, +    {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, +    {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, +    {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, +    {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, +    {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, +    {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, +    {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, +    {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, +    {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, +    {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, +    {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, +    {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, +    {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, +    {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, +    {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, +    {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, +    {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, +    {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, +    {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, +    {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, +    {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, +    {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, +    {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, +    {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, +    {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, +    {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, +    {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, +    {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, +    {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, +    {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, +    {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, +    {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, +    {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, +    {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, +    {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, +    {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, +    {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, +    {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, +    {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, +    {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, +    {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, +    {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, +    {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, +    {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, +    {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, +    {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"},  ] -[package.dependencies] -asyncpg = ">=0.18,<1.0" -SQLAlchemy = ">=1.2.16,<1.4" -  [package.extras] -aiohttp = ["gino-aiohttp (>=0.1.0,<0.2.0)"] -quart = ["gino-quart (>=0.1.0,<0.2.0)"] -sanic = ["gino-sanic (>=0.1.0,<0.2.0)"] -starlette = ["gino-starlette (>=0.1.1,<0.2.0)"] -tornado = 
["gino-tornado (>=0.1.0,<0.2.0)"] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"]  [[package]]  name = "humanfriendly" @@ -833,75 +888,6 @@ pyyaml = ">=5.1"  virtualenv = ">=20.10.0"  [[package]] -name = "psycopg2-binary" -version = "2.9.7" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.6" -files = [ -    {file = "psycopg2-binary-2.9.7.tar.gz", hash = "sha256:1b918f64a51ffe19cd2e230b3240ba481330ce1d4b7875ae67305bd1d37b041c"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ea5f8ee87f1eddc818fc04649d952c526db4426d26bab16efbe5a0c52b27d6ab"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2993ccb2b7e80844d534e55e0f12534c2871952f78e0da33c35e648bf002bbff"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbbc3c5d15ed76b0d9db7753c0db40899136ecfe97d50cbde918f630c5eb857a"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:692df8763b71d42eb8343f54091368f6f6c9cfc56dc391858cdb3c3ef1e3e584"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcfd5d37e027ec393a303cc0a216be564b96c80ba532f3d1e0d2b5e5e4b1e6e"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17cc17a70dfb295a240db7f65b6d8153c3d81efb145d76da1e4a096e9c5c0e63"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e5666632ba2b0d9757b38fc17337d84bdf932d38563c5234f5f8c54fd01349c9"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7db7b9b701974c96a88997d458b38ccb110eba8f805d4b4f74944aac48639b42"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c82986635a16fb1fa15cd5436035c88bc65c3d5ced1cfaac7f357ee9e9deddd4"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4fe13712357d802080cfccbf8c6266a3121dc0e27e2144819029095ccf708372"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-win32.whl", hash = "sha256:122641b7fab18ef76b18860dd0c772290566b6fb30cc08e923ad73d17461dc63"}, -    {file = "psycopg2_binary-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:f8651cf1f144f9ee0fa7d1a1df61a9184ab72962531ca99f077bbdcba3947c58"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4ecc15666f16f97709106d87284c136cdc82647e1c3f8392a672616aed3c7151"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fbb1184c7e9d28d67671992970718c05af5f77fc88e26fd7136613c4ece1f89"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7968fd20bd550431837656872c19575b687f3f6f98120046228e451e4064df"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:094af2e77a1976efd4956a031028774b827029729725e136514aae3cdf49b87b"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26484e913d472ecb6b45937ea55ce29c57c662066d222fb0fbdc1fab457f18c5"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f309b77a7c716e6ed9891b9b42953c3ff7d533dc548c1e33fddc73d2f5e21f9"}, -    {file = 
"psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d92e139ca388ccfe8c04aacc163756e55ba4c623c6ba13d5d1595ed97523e4b"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2df562bb2e4e00ee064779902d721223cfa9f8f58e7e52318c97d139cf7f012d"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4eec5d36dbcfc076caab61a2114c12094c0b7027d57e9e4387b634e8ab36fd44"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1011eeb0c51e5b9ea1016f0f45fa23aca63966a4c0afcf0340ccabe85a9f65bd"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-win32.whl", hash = "sha256:ded8e15f7550db9e75c60b3d9fcbc7737fea258a0f10032cdb7edc26c2a671fd"}, -    {file = "psycopg2_binary-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:8a136c8aaf6615653450817a7abe0fc01e4ea720ae41dfb2823eccae4b9062a3"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2dec5a75a3a5d42b120e88e6ed3e3b37b46459202bb8e36cd67591b6e5feebc1"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc10da7e7df3380426521e8c1ed975d22df678639da2ed0ec3244c3dc2ab54c8"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee919b676da28f78f91b464fb3e12238bd7474483352a59c8a16c39dfc59f0c5"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb1c0e682138f9067a58fc3c9a9bf1c83d8e08cfbee380d858e63196466d5c86"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00d8db270afb76f48a499f7bb8fa70297e66da67288471ca873db88382850bf4"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b0c2b466b2f4d89ccc33784c4ebb1627989bd84a39b79092e560e937a11d4ac"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:51d1b42d44f4ffb93188f9b39e6d1c82aa758fdb8d9de65e1ddfe7a7d250d7ad"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:11abdbfc6f7f7dea4a524b5f4117369b0d757725798f1593796be6ece20266cb"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f02f4a72cc3ab2565c6d9720f0343cb840fb2dc01a2e9ecb8bc58ccf95dc5c06"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:81d5dd2dd9ab78d31a451e357315f201d976c131ca7d43870a0e8063b6b7a1ec"}, -    {file = "psycopg2_binary-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:62cb6de84d7767164a87ca97e22e5e0a134856ebcb08f21b621c6125baf61f16"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:59f7e9109a59dfa31efa022e94a244736ae401526682de504e87bd11ce870c22"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:95a7a747bdc3b010bb6a980f053233e7610276d55f3ca506afff4ad7749ab58a"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c721ee464e45ecf609ff8c0a555018764974114f671815a0a7152aedb9f3343"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4f37bbc6588d402980ffbd1f3338c871368fb4b1cfa091debe13c68bb3852b3"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac83ab05e25354dad798401babaa6daa9577462136ba215694865394840e31f8"}, -    {file = 
"psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:024eaeb2a08c9a65cd5f94b31ace1ee3bb3f978cd4d079406aef85169ba01f08"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1c31c2606ac500dbd26381145684d87730a2fac9a62ebcfbaa2b119f8d6c19f4"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:42a62ef0e5abb55bf6ffb050eb2b0fcd767261fa3faf943a4267539168807522"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7952807f95c8eba6a8ccb14e00bf170bb700cafcec3924d565235dffc7dc4ae8"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e02bc4f2966475a7393bd0f098e1165d470d3fa816264054359ed4f10f6914ea"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-win32.whl", hash = "sha256:fdca0511458d26cf39b827a663d7d87db6f32b93efc22442a742035728603d5f"}, -    {file = "psycopg2_binary-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:d0b16e5bb0ab78583f0ed7ab16378a0f8a89a27256bb5560402749dbe8a164d7"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6822c9c63308d650db201ba22fe6648bd6786ca6d14fdaf273b17e15608d0852"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f94cb12150d57ea433e3e02aabd072205648e86f1d5a0a692d60242f7809b15"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5ee89587696d808c9a00876065d725d4ae606f5f7853b961cdbc348b0f7c9a1"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad5ec10b53cbb57e9a2e77b67e4e4368df56b54d6b00cc86398578f1c635f329"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:642df77484b2dcaf87d4237792246d8068653f9e0f5c025e2c692fc56b0dda70"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6a8b575ac45af1eaccbbcdcf710ab984fd50af048fe130672377f78aaff6fc1"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f955aa50d7d5220fcb6e38f69ea126eafecd812d96aeed5d5f3597f33fad43bb"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad26d4eeaa0d722b25814cce97335ecf1b707630258f14ac4d2ed3d1d8415265"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ced63c054bdaf0298f62681d5dcae3afe60cbae332390bfb1acf0e23dcd25fc8"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b04da24cbde33292ad34a40db9832a80ad12de26486ffeda883413c9e1b1d5e"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-win32.whl", hash = "sha256:18f12632ab516c47c1ac4841a78fddea6508a8284c7cf0f292cb1a523f2e2379"}, -    {file = "psycopg2_binary-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb3b8d55924a6058a26db69fb1d3e7e32695ff8b491835ba9f479537e14dcf9f"}, -] - -[[package]]  name = "pycares"  version = "4.3.0"  description = "Python interface for c-ares" @@ -1264,58 +1250,81 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (  [[package]]  name = "sqlalchemy" -version = "1.3.24" +version = "2.0.20"  description = "Database Abstraction Library"  optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7"  files = [ -    {file = "SQLAlchemy-1.3.24-cp27-cp27m-macosx_10_14_x86_64.whl", hash = 
"sha256:87a2725ad7d41cd7376373c15fd8bf674e9c33ca56d0b8036add2d634dba372e"}, -    {file = "SQLAlchemy-1.3.24-cp27-cp27m-win32.whl", hash = "sha256:f597a243b8550a3a0b15122b14e49d8a7e622ba1c9d29776af741f1845478d79"}, -    {file = "SQLAlchemy-1.3.24-cp27-cp27m-win_amd64.whl", hash = "sha256:fc4cddb0b474b12ed7bdce6be1b9edc65352e8ce66bc10ff8cbbfb3d4047dbf4"}, -    {file = "SQLAlchemy-1.3.24-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f1149d6e5c49d069163e58a3196865e4321bad1803d7886e07d8710de392c548"}, -    {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:14f0eb5db872c231b20c18b1e5806352723a3a89fb4254af3b3e14f22eaaec75"}, -    {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:e98d09f487267f1e8d1179bf3b9d7709b30a916491997137dd24d6ae44d18d79"}, -    {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:fc1f2a5a5963e2e73bac4926bdaf7790c4d7d77e8fc0590817880e22dd9d0b8b"}, -    {file = "SQLAlchemy-1.3.24-cp35-cp35m-win32.whl", hash = "sha256:f3c5c52f7cb8b84bfaaf22d82cb9e6e9a8297f7c2ed14d806a0f5e4d22e83fb7"}, -    {file = "SQLAlchemy-1.3.24-cp35-cp35m-win_amd64.whl", hash = "sha256:0352db1befcbed2f9282e72843f1963860bf0e0472a4fa5cf8ee084318e0e6ab"}, -    {file = "SQLAlchemy-1.3.24-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2ed6343b625b16bcb63c5b10523fd15ed8934e1ed0f772c534985e9f5e73d894"}, -    {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:34fcec18f6e4b24b4a5f6185205a04f1eab1e56f8f1d028a2a03694ebcc2ddd4"}, -    {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e47e257ba5934550d7235665eee6c911dc7178419b614ba9e1fbb1ce6325b14f"}, -    {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:816de75418ea0953b5eb7b8a74933ee5a46719491cd2b16f718afc4b291a9658"}, -    {file = "SQLAlchemy-1.3.24-cp36-cp36m-win32.whl", hash = "sha256:26155ea7a243cbf23287f390dba13d7927ffa1586d3208e0e8d615d0c506f996"}, -    {file = "SQLAlchemy-1.3.24-cp36-cp36m-win_amd64.whl", hash = "sha256:f03bd97650d2e42710fbe4cf8a59fae657f191df851fc9fc683ecef10746a375"}, -    {file = "SQLAlchemy-1.3.24-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a006d05d9aa052657ee3e4dc92544faae5fcbaafc6128217310945610d862d39"}, -    {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1e2f89d2e5e3c7a88e25a3b0e43626dba8db2aa700253023b82e630d12b37109"}, -    {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0d5d862b1cfbec5028ce1ecac06a3b42bc7703eb80e4b53fceb2738724311443"}, -    {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0172423a27fbcae3751ef016663b72e1a516777de324a76e30efa170dbd3dd2d"}, -    {file = "SQLAlchemy-1.3.24-cp37-cp37m-win32.whl", hash = "sha256:d37843fb8df90376e9e91336724d78a32b988d3d20ab6656da4eb8ee3a45b63c"}, -    {file = "SQLAlchemy-1.3.24-cp37-cp37m-win_amd64.whl", hash = "sha256:c10ff6112d119f82b1618b6dc28126798481b9355d8748b64b9b55051eb4f01b"}, -    {file = "SQLAlchemy-1.3.24-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:861e459b0e97673af6cc5e7f597035c2e3acdfb2608132665406cded25ba64c7"}, -    {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5de2464c254380d8a6c20a2746614d5a436260be1507491442cf1088e59430d2"}, -    {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d375d8ccd3cebae8d90270f7aa8532fe05908f79e78ae489068f3b4eee5994e8"}, -    {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:014ea143572fee1c18322b7908140ad23b3994036ef4c0d630110faf942652f8"}, -    {file = "SQLAlchemy-1.3.24-cp38-cp38-win32.whl", hash = "sha256:6607ae6cd3a07f8a4c3198ffbf256c261661965742e2b5265a77cd5c679c9bba"}, -    {file = "SQLAlchemy-1.3.24-cp38-cp38-win_amd64.whl", hash = "sha256:fcb251305fa24a490b6a9ee2180e5f8252915fb778d3dafc70f9cc3f863827b9"}, -    {file = "SQLAlchemy-1.3.24-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01aa5f803db724447c1d423ed583e42bf5264c597fd55e4add4301f163b0be48"}, -    {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4d0e3515ef98aa4f0dc289ff2eebb0ece6260bbf37c2ea2022aad63797eacf60"}, -    {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:bce28277f308db43a6b4965734366f533b3ff009571ec7ffa583cb77539b84d6"}, -    {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8110e6c414d3efc574543109ee618fe2c1f96fa31833a1ff36cc34e968c4f233"}, -    {file = "SQLAlchemy-1.3.24-cp39-cp39-win32.whl", hash = "sha256:ee5f5188edb20a29c1cc4a039b074fdc5575337c9a68f3063449ab47757bb064"}, -    {file = "SQLAlchemy-1.3.24-cp39-cp39-win_amd64.whl", hash = "sha256:09083c2487ca3c0865dc588e07aeaa25416da3d95f7482c07e92f47e080aa17b"}, -    {file = "SQLAlchemy-1.3.24.tar.gz", hash = "sha256:ebbb777cbf9312359b897bf81ba00dae0f5cb69fba2a18265dcc18a6f5ef7519"}, +    {file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759b51346aa388c2e606ee206c0bc6f15a5299f6174d1e10cadbe4530d3c7a98"}, +    {file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1506e988ebeaaf316f183da601f24eedd7452e163010ea63dbe52dc91c7fc70e"}, +    {file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5768c268df78bacbde166b48be788b83dddaa2a5974b8810af422ddfe68a9bc8"}, +    {file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f0dd6d15b6dc8b28a838a5c48ced7455c3e1fb47b89da9c79cc2090b072a50"}, +    {file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:243d0fb261f80a26774829bc2cee71df3222587ac789b7eaf6555c5b15651eed"}, +    {file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb6d77c31e1bf4268b4d61b549c341cbff9842f8e115ba6904249c20cb78a61"}, +    {file = "SQLAlchemy-2.0.20-cp310-cp310-win32.whl", hash = "sha256:bcb04441f370cbe6e37c2b8d79e4af9e4789f626c595899d94abebe8b38f9a4d"}, +    {file = "SQLAlchemy-2.0.20-cp310-cp310-win_amd64.whl", hash = "sha256:d32b5ffef6c5bcb452723a496bad2d4c52b346240c59b3e6dba279f6dcc06c14"}, +    {file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd81466bdbc82b060c3c110b2937ab65ace41dfa7b18681fdfad2f37f27acdd7"}, +    {file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fe7d61dc71119e21ddb0094ee994418c12f68c61b3d263ebaae50ea8399c4d4"}, +    {file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4e571af672e1bb710b3cc1a9794b55bce1eae5aed41a608c0401885e3491179"}, +    {file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3364b7066b3c7f4437dd345d47271f1251e0cfb0aba67e785343cdbdb0fff08c"}, +    {file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1be86ccea0c965a1e8cd6ccf6884b924c319fcc85765f16c69f1ae7148eba64b"}, +    {file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:1d35d49a972649b5080557c603110620a86aa11db350d7a7cb0f0a3f611948a0"}, +    {file = "SQLAlchemy-2.0.20-cp311-cp311-win32.whl", hash = "sha256:27d554ef5d12501898d88d255c54eef8414576f34672e02fe96d75908993cf53"}, +    {file = "SQLAlchemy-2.0.20-cp311-cp311-win_amd64.whl", hash = "sha256:411e7f140200c02c4b953b3dbd08351c9f9818d2bd591b56d0fa0716bd014f1e"}, +    {file = "SQLAlchemy-2.0.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3c6aceebbc47db04f2d779db03afeaa2c73ea3f8dcd3987eb9efdb987ffa09a3"}, +    {file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d3f175410a6db0ad96b10bfbb0a5530ecd4fcf1e2b5d83d968dd64791f810ed"}, +    {file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8186be85da6587456c9ddc7bf480ebad1a0e6dcbad3967c4821233a4d4df57"}, +    {file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3d99ba99007dab8233f635c32b5cd24fb1df8d64e17bc7df136cedbea427897"}, +    {file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:76fdfc0f6f5341987474ff48e7a66c3cd2b8a71ddda01fa82fedb180b961630a"}, +    {file = "SQLAlchemy-2.0.20-cp37-cp37m-win32.whl", hash = "sha256:d3793dcf5bc4d74ae1e9db15121250c2da476e1af8e45a1d9a52b1513a393459"}, +    {file = "SQLAlchemy-2.0.20-cp37-cp37m-win_amd64.whl", hash = "sha256:79fde625a0a55220d3624e64101ed68a059c1c1f126c74f08a42097a72ff66a9"}, +    {file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:599ccd23a7146e126be1c7632d1d47847fa9f333104d03325c4e15440fc7d927"}, +    {file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a58052b5a93425f656675673ef1f7e005a3b72e3f2c91b8acca1b27ccadf5f4"}, +    {file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79543f945be7a5ada9943d555cf9b1531cfea49241809dd1183701f94a748624"}, +    {file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63e73da7fb030ae0a46a9ffbeef7e892f5def4baf8064786d040d45c1d6d1dc5"}, +    {file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ce5e81b800a8afc870bb8e0a275d81957e16f8c4b62415a7b386f29a0cb9763"}, +    {file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb0d3e94c2a84215532d9bcf10229476ffd3b08f481c53754113b794afb62d14"}, +    {file = "SQLAlchemy-2.0.20-cp38-cp38-win32.whl", hash = "sha256:8dd77fd6648b677d7742d2c3cc105a66e2681cc5e5fb247b88c7a7b78351cf74"}, +    {file = "SQLAlchemy-2.0.20-cp38-cp38-win_amd64.whl", hash = "sha256:6f8a934f9dfdf762c844e5164046a9cea25fabbc9ec865c023fe7f300f11ca4a"}, +    {file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:26a3399eaf65e9ab2690c07bd5cf898b639e76903e0abad096cd609233ce5208"}, +    {file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4cde2e1096cbb3e62002efdb7050113aa5f01718035ba9f29f9d89c3758e7e4e"}, +    {file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b09ba72e4e6d341bb5bdd3564f1cea6095d4c3632e45dc69375a1dbe4e26ec"}, +    {file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b74eeafaa11372627ce94e4dc88a6751b2b4d263015b3523e2b1e57291102f0"}, +    {file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:77d37c1b4e64c926fa3de23e8244b964aab92963d0f74d98cbc0783a9e04f501"}, +    {file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:eefebcc5c555803065128401a1e224a64607259b5eb907021bf9b175f315d2a6"}, +    {file = "SQLAlchemy-2.0.20-cp39-cp39-win32.whl", hash = "sha256:3423dc2a3b94125094897118b52bdf4d37daf142cbcf26d48af284b763ab90e9"}, +    {file = "SQLAlchemy-2.0.20-cp39-cp39-win_amd64.whl", hash = "sha256:5ed61e3463021763b853628aef8bc5d469fe12d95f82c74ef605049d810f3267"}, +    {file = "SQLAlchemy-2.0.20-py3-none-any.whl", hash = "sha256:63a368231c53c93e2b67d0c5556a9836fdcd383f7e3026a39602aad775b14acf"}, +    {file = "SQLAlchemy-2.0.20.tar.gz", hash = "sha256:ca8a5ff2aa7f3ade6c498aaafce25b1eaeabe4e42b73e25519183e4566a16fc6"},  ] +[package.dependencies] +greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\""} +typing-extensions = ">=4.2.0" +  [package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]  mssql = ["pyodbc"]  mssql-pymssql = ["pymssql"]  mssql-pyodbc = ["pyodbc"] -mysql = ["mysqlclient"] -oracle = ["cx-oracle"] -postgresql = ["psycopg2"] -postgresql-pg8000 = ["pg8000 (<1.16.6)"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"]  postgresql-psycopg2binary = ["psycopg2-binary"]  postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3-binary"]  [[package]]  name = "statsd" @@ -1460,4 +1469,4 @@ multidict = ">=4.0"  [metadata]  lock-version = "2.0"  python-versions = "3.11.*" -content-hash = "381d9b82c08a44b0567a7ef61c58c240aa53108cc38ba12c3b3721b067836c59" +content-hash = "2f8f21b6051b017dc39a9376d4728355e786ab5a60ef9efcde8bff6b6e5734ba" diff --git a/pyproject.toml b/pyproject.toml index f8f39e0..371255b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,10 +14,10 @@ pydis-core = "10.2.0"  alembic = "1.12.0"  coloredlogs = "15.0.1"  deepmerge = "1.1.0" -gino = "1.0.1" +sqlalchemy = { extras = ["asyncio"], version = "2.0.20" }  python-dotenv = "1.0.0" -psycopg2-binary = "2.9.7"  toml = "0.10.2" +asyncpg = "0.28.0"  [tool.poetry.dev-dependencies]  pre-commit = "3.4.0" @@ -32,7 +32,7 @@ build-backend = "poetry.masonry.api"  [tool.ruff]  target-version = "py311" -extend-exclude = [".cache", "alembic"] +extend-exclude = [".cache"]  ignore = [      "ANN002",      "ANN003", @@ -56,3 +56,4 @@ select = ["ALL"]  [tool.ruff.per-file-ignores]  "metricity/models.py" = ["A003"] +"alembic/*" = ["INP001"] | 
