diff --git a/README.md b/README.md
index 48dc9cc..a3740b0 100644
--- a/README.md
+++ b/README.md
@@ -278,6 +278,13 @@ loom = Loom(
     logs_filename="logs.sql",
     port=5432,
 )
+
+# OR with connection_uri
+loom = Loom(
+    dialect="mysql",
+    connection_uri = "mysql://root:root@localhost:3306/hi",
+    # ...
+)
 ```
 
 The `Loom` class takes in the following options:
diff --git a/dataloom/keys.py b/dataloom/keys.py
index 59e757b..4410e1f 100644
--- a/dataloom/keys.py
+++ b/dataloom/keys.py
@@ -1,7 +1,7 @@
 # Configuration file for unit testing.
 
-push = True
+push = False
 
 
 class PgConfig:
diff --git a/dataloom/loom/__init__.py b/dataloom/loom/__init__.py
index 82e1be2..b5acd58 100644
--- a/dataloom/loom/__init__.py
+++ b/dataloom/loom/__init__.py
@@ -69,6 +69,15 @@ class Loom(ILoom):
 
     """
 
+    def __get_database_name(self, uri: str) -> str | None:
+        if self.dialect == "postgres" or self.dialect == "mysql":
+            from urllib.parse import urlparse
+
+            components = urlparse(uri)
+            db = components.path.lstrip("/")
+            return db
+        return None
+
     def __init__(
         self,
         dialect: DIALECT_LITERAL,
@@ -81,12 +90,16 @@ def __init__(
         sql_logger: Optional[SQL_LOGGER_LITERAL] = None,
         logs_filename: Optional[str] = "dataloom.sql",
     ) -> None:
-        self.database = database
         self.conn = None
         self.sql_logger = sql_logger
         self.dialect = dialect
         self.logs_filename = logs_filename
         self.connection_uri = connection_uri
+        self.database = (
+            database
+            if self.connection_uri is None
+            else self.__get_database_name(self.connection_uri)
+        )
 
         try:
             config = instances[dialect]
@@ -1128,7 +1141,7 @@ def tables(self) -> list[str]:
         """
 
         sql = GetStatement(self.dialect)._get_tables_command
-        res = self._execute_sql(sql, fetchall=True)
+        res = self._execute_sql(sql, fetchall=True, _verbose=0)
         if self.dialect == "sqlite":
             return [t[0] for t in res if not str(t[0]).lower().startswith("sqlite_")]
         return [t[0] for t in res]
@@ -1331,19 +1344,8 @@ def connect_and_sync(
                 sql_logger=self.sql_logger,
                 logs_filename=self.logs_filename,
             )
-            for model in models:
-                if drop or force:
-                    self._execute_sql(model._drop_sql(dialect=self.dialect))
-                    for sql in model._create_sql(dialect=self.dialect):
-                        if sql is not None:
-                            self._execute_sql(sql)
-                elif alter:
-                    pass
-                else:
-                    for sql in model._create_sql(dialect=self.dialect):
-                        if sql is not None:
-                            self._execute_sql(sql)
-            return self.conn, self.tables
+            tables = self.sync(models=models, drop=drop, force=force, alter=alter)
+            return self.conn, tables
         except Exception as e:
             raise Exception(e)
@@ -1407,7 +1409,37 @@ def sync(
                         if sql is not None:
                             self._execute_sql(sql)
                 elif alter:
-                    pass
+                    # 1. we only alter the table if it already exists
+                    # 2. if not, we just create a new table
+                    if model._get_table_name() in self.tables:
+                        sql1 = model._get_describe_stm(
+                            dialect=self.dialect, fields=["column_name"]
+                        )
+                        args = None
+                        if self.dialect == "mysql":
+                            args = (self.database, model._get_table_name())
+                        elif self.dialect == "postgres":
+                            args = ("public", model._get_table_name())
+                        elif self.dialect == "sqlite":
+                            args = ()
+                        cols = self._execute_sql(
+                            sql1, _verbose=0, args=args, fetchall=True
+                        )
+                        if cols is not None:
+                            if self.dialect == "mysql":
+                                old_columns = [col for (col,) in cols]
+                            elif self.dialect == "postgres":
+                                old_columns = [col for (col,) in cols]
+                            else:
+                                old_columns = [col[1] for col in cols]
+                            sql = model._alter_sql(
+                                dialect=self.dialect, old_columns=old_columns
+                            )
+                            self._execute_sql(sql)
+                    else:
+                        for sql in model._create_sql(dialect=self.dialect):
+                            if sql is not None:
+                                self._execute_sql(sql)
                 else:
                     for sql in model._create_sql(dialect=self.dialect):
                         if sql is not None:
                             self._execute_sql(sql)
diff --git a/dataloom/model/__init__.py b/dataloom/model/__init__.py
index fcd8f19..ae02f0a 100644
--- a/dataloom/model/__init__.py
+++ b/dataloom/model/__init__.py
@@ -55,12 +55,18 @@ class Model:
     """
 
     @classmethod
-    def _create_sql(cls, dialect: DIALECT_LITERAL, ignore_exists=True):
+    def _create_sql(cls, dialect: DIALECT_LITERAL):
         sqls = GetStatement(
             dialect=dialect, model=cls, table_name=cls._get_table_name()
         )._get_create_table_command
         return sqls
 
+    @classmethod
+    def _alter_sql(cls, dialect: DIALECT_LITERAL, old_columns: list[str]):
+        return GetStatement(
+            dialect=dialect, model=cls, table_name=cls._get_table_name()
+        )._get_alter_table_command(old_columns=old_columns)
+
     @classmethod
     def _get_table_name(self):
         __tablename__ = None
diff --git a/dataloom/statements/__init__.py b/dataloom/statements/__init__.py
index c45334d..09ba74a 100644
--- a/dataloom/statements/__init__.py
+++ b/dataloom/statements/__init__.py
@@ -18,6 +18,7 @@
     get_relationships,
     get_create_table_params,
     get_table_fields,
+    get_alter_table_params,
 )
 
 
@@ -147,28 +148,25 @@ def _get_tables_command(self) -> Optional[str]:
         return sql
 
     @property
-    def _get_create_table_command(self) -> Optional[str]:
+    def _get_create_table_command(self) -> Optional[list[str]]:
         # is the primary key defined in this table?
         _, pk_name, _, _ = get_table_fields(model=self.model, dialect=self.dialect)
-        pks, user_fields, predefined_fields, sql2 = get_create_table_params(
+        pks, user_fields, predefined_fields = get_create_table_params(
             dialect=self.dialect,
             model=self.model,
-            child_alias_name=self.model.__name__.lower(),
-            child_pk_name=pk_name,
-            child_name=self.model._get_table_name(),
         )
+
+        if len(pks) == 0:
+            raise PkNotDefinedException(
+                "Your table does not have a primary key column."
+            )
+        if len(pks) > 1:
+            raise TooManyPkException(
+                f"You have defined many fields as primary keys which is not allowed. Fields ({', '.join(pks)}) are primary keys."
+            )
+        fields = [*user_fields, *predefined_fields]
+        fields_name = ", ".join(f for f in [" ".join(field) for field in fields])
         if self.dialect == "postgres":
-            # do we have a single primary key or not?
-            if len(pks) == 0:
-                raise PkNotDefinedException(
-                    "Your table does not have a primary key column."
-                )
-            if len(pks) > 1:
-                raise TooManyPkException(
-                    f"You have defined many field as primary keys which is not allowed. Fields ({', '.join(pks)}) are primary keys."
-                )
-            fields = [*user_fields, *predefined_fields]
-            fields_name = ", ".join(f for f in [" ".join(field) for field in fields])
             sql = (
                 PgStatements.CREATE_NEW_TABLE.format(
                     table_name=f'"{self.table_name}"', fields_name=fields_name
@@ -180,17 +178,6 @@ def _get_create_table_command(self) -> Optional[str]:
             )
 
         elif self.dialect == "mysql":
-            # do we have a single primary key or not?
-            if len(pks) == 0:
-                raise PkNotDefinedException(
-                    "Your table does not have a primary key column."
-                )
-            if len(pks) > 1:
-                raise TooManyPkException(
-                    f"You have defined many field as primary keys which is not allowed. Fields ({', '.join(pks)}) are primary keys."
-                )
-            fields = [*user_fields, *predefined_fields]
-            fields_name = ", ".join(f for f in [" ".join(field) for field in fields])
             sql = (
                 MySqlStatements.CREATE_NEW_TABLE.format(
                     table_name=f"`{self.table_name}`", fields_name=fields_name
@@ -202,23 +189,12 @@ def _get_create_table_command(self) -> Optional[str]:
             )
 
         elif self.dialect == "sqlite":
-            # do we have a single primary key or not?
-            if len(pks) == 0:
-                raise PkNotDefinedException(
-                    "Your table does not have a primary key column."
-                )
-            if len(pks) > 1:
-                raise TooManyPkException(
-                    f"You have defined many field as primary keys which is not allowed. Fields ({', '.join(pks)}) are primary keys."
-                )
-            fields = [*user_fields, *predefined_fields]
-            fields_name = ", ".join(f for f in [" ".join(field) for field in fields])
             sql = (
-                MySqlStatements.CREATE_NEW_TABLE.format(
+                Sqlite3Statements.CREATE_NEW_TABLE.format(
                     table_name=f"`{self.table_name}`", fields_name=fields_name
                 )
                 if not self.ignore_exists
-                else MySqlStatements.CREATE_NEW_TABLE_IF_NOT_EXITS.format(
+                else Sqlite3Statements.CREATE_NEW_TABLE_IF_NOT_EXITS.format(
                     table_name=f"`{self.table_name}`", fields_name=fields_name
                 )
             )
@@ -227,7 +203,7 @@ def _get_create_table_command(self) -> Optional[str]:
             raise UnsupportedDialectException(
                 "The dialect passed is not supported the supported dialects are: {'postgres', 'mysql', 'sqlite'}"
             )
-        return [sql, sql2]
+        return [sql]
 
     def _get_select_where_command(
         self,
@@ -851,3 +827,43 @@ def _get_pk_command(
                 "The dialect passed is not supported the supported dialects are: {'postgres', 'mysql', 'sqlite'}"
             )
         return sql
+
+    def _get_alter_table_command(self, old_columns: list[str]) -> str:
+        """
+        1. get table columns
+        2. check if a column is new
+        3. check if the column has been removed
+        """
+        _, pk_name, _, _ = get_table_fields(model=self.model, dialect=self.dialect)
+        pks, alterations = get_alter_table_params(
+            dialect=self.dialect, model=self.model, old_columns=old_columns
+        )
+
+        alterations = ", ".join(alterations)
+        # do we have a single primary key or not?
+        if len(pks) == 0:
+            raise PkNotDefinedException(
+                "Your table does not have a primary key column."
+            )
+        if len(pks) > 1:
+            raise TooManyPkException(
+                f"You have defined many fields as primary keys which is not allowed. Fields ({', '.join(pks)}) are primary keys."
+            )
+        if self.dialect == "postgres":
+            sql = PgStatements.ALTER_TABLE_COMMAND.format(
+                table_name=f'"{self.table_name}"', alterations=alterations
+            )
+        elif self.dialect == "mysql":
+            sql = MySqlStatements.ALTER_TABLE_COMMAND.format(
+                table_name=f"`{self.table_name}`", alterations=alterations
+            )
+
+        elif self.dialect == "sqlite":
+            sql = Sqlite3Statements.ALTER_TABLE_COMMAND.format(
+                table_name=f"`{self.table_name}`", alterations=alterations
+            )
+        else:
+            raise UnsupportedDialectException(
+                "The dialect passed is not supported the supported dialects are: {'postgres', 'mysql', 'sqlite'}"
+            )
+        return sql
diff --git a/dataloom/statements/statements.py b/dataloom/statements/statements.py
index 18591ed..473a672 100644
--- a/dataloom/statements/statements.py
+++ b/dataloom/statements/statements.py
@@ -1,4 +1,10 @@
 class MySqlStatements:
+    # Altering tables
+
+    ALTER_TABLE_COMMAND = """
+    ALTER TABLE {table_name} {alterations};
+    """
+
     # describing tables
 
     DESCRIBE_TABLE_COMMAND = """
@@ -137,6 +143,11 @@ class MySqlStatements:
 
 
 class Sqlite3Statements:
+    # Altering tables
+
+    ALTER_TABLE_COMMAND = """
+    ALTER TABLE {table_name} {alterations};
+    """
     # describing table
     DESCRIBE_TABLE_COMMAND = """PRAGMA table_info({table_name});"""
 
@@ -254,6 +265,11 @@ class Sqlite3Statements:
 
 
 class PgStatements:
+    # Altering tables
+
+    ALTER_TABLE_COMMAND = """
+    ALTER TABLE {table_name} {alterations};
+    """
     # describing table
     DESCRIBE_TABLE_COMMAND = """
     SELECT {fields}
diff --git a/dataloom/utils/__init__.py b/dataloom/utils/__init__.py
index c48402b..f598339 100644
--- a/dataloom/utils/__init__.py
+++ b/dataloom/utils/__init__.py
@@ -2,6 +2,7 @@
 
 from dataloom.utils.logger import console_logger, file_logger
 from dataloom.utils.create_table import get_create_table_params
+from dataloom.utils.alter_table import get_alter_table_params
 from dataloom.utils.aggregations import get_groups
 from dataloom.utils.helpers import is_collection
 from dataloom.utils.tables import (
@@ -162,4 +163,5 @@ def get_formatted_query(
     print_pretty_table,
     is_collection,
     get_groups,
+    get_alter_table_params,
 ]
diff --git a/dataloom/utils/alter_table.py b/dataloom/utils/alter_table.py
new file mode 100644
index 0000000..1690a2d
--- /dev/null
+++ b/dataloom/utils/alter_table.py
@@ -0,0 +1,117 @@
+import inspect
+from dataloom.columns import (
+    PrimaryKeyColumn,
+    ForeignKeyColumn,
+    Column,
+    CreatedAtColumn,
+    UpdatedAtColumn,
+)
+from dataloom.types import DIALECT_LITERAL
+import re
+
+
+def get_alter_table_params(model, dialect: DIALECT_LITERAL, old_columns: list[str]):
+    pks = []
+    alterations = []
+
+    # add or modify columns
+    for name, field in inspect.getmembers(model):
+        if isinstance(field, PrimaryKeyColumn):
+            col = f'"{name}"' if dialect == "postgres" else f"`{name}`"
+            pks.append(col)
+            old_columns = [c for c in old_columns if c != name]
+
+        elif isinstance(field, Column):
+            col = f'"{name}"' if dialect == "postgres" else f"`{name}`"
+            _values = re.sub(
+                r"\s+",
+                " ",
+                "{_type} {unique} {nullable} {default} ".format(
+                    _type=field.sql_type(dialect),
+                    unique=field.unique_constraint,
+                    nullable=field.nullable_constraint,
+                    default=field.default_constraint,
+                ).strip(),
+            )
+            old_name = f'"{name}"' if dialect == "postgres" else f"`{name}`"
+            if name in old_columns:
+                old_columns = [c for c in old_columns if c != name]
+                if dialect == "mysql":
+                    alterations.append(f"MODIFY COLUMN {col} {_values}")
+                elif dialect == "postgres":
+                    alterations.append(f"RENAME COLUMN {old_name} TO {col}")
+                elif dialect == "sqlite":
alterations.append(f"RENAME COLUMN {old_name} TO {col}") + else: + if dialect == "mysql": + alterations.append(f"ADD {col} {_values}") + else: + alterations.append(f"ADD COLUMN {col} {_values}") + + elif isinstance(field, CreatedAtColumn): + col = f'"{name}"' if dialect == "postgres" else f"`{name}`" + if name in old_columns: + old_columns = [c for c in old_columns if c != name] + if dialect == "mysql": + alterations.append(f"MODIFY COLUMN {col} {_values}") + elif dialect == "postgres": + alterations.append(f"ALTER COLUMN {col} {_values}") + elif dialect == "sqlite": + alterations.append(f"RENAME TO {col} {_values}") + else: + alterations.append(f"ADD {col} {field.created_at}") + elif isinstance(field, UpdatedAtColumn): + col = f'"{name}"' if dialect == "postgres" else f"`{name}`" + if name in old_columns: + old_columns = [c for c in old_columns if c != name] + if dialect == "mysql": + alterations.append(f"MODIFY COLUMN {col} {_values}") + elif dialect == "postgres": + alterations.append(f"ALTER COLUMN {col} {_values}") + elif dialect == "sqlite": + alterations.append(f"RENAME TO {col} {_values}") + else: + alterations.append(f"ADD {col} {field.updated_at}") + elif isinstance(field, ForeignKeyColumn): + pk, pk_type = field.table._get_pk_attributes(dialect=dialect) + parent_table_name = field.table._get_table_name() + col = f'"{name}"' if dialect == "postgres" else f"`{name}`" + _value = ( + "{pk_type} {unique} {nullable} REFERENCES {parent_table_name}({pk}) ON DELETE {onDelete} ON UPDATE {onUpdate}".format( + onDelete=field.onDelete, + onUpdate=field.onUpdate, + pk_type=pk_type, + parent_table_name=f'"{parent_table_name}"' + if dialect == "postgres" + else f"`{parent_table_name}`", + pk=f'"{pk}"' if dialect == "postgres" else f"`{pk}`", + nullable="NOT NULL", + unique="UNIQUE" if field.maps_to == "1-1" else "", + ) + if field.required + else "{pk_type} REFERENCES {parent_table_name}({pk}) ON DELETE SET NULL".format( + pk_type=pk_type, + parent_table_name=f'"{parent_table_name}"' + if dialect == "postgres" + else f"`{parent_table_name}`", + pk=f'"{pk}"' if dialect == "postgres" else f"`{pk}`", + ) + ) + + if name in old_columns: + old_columns = [c for c in old_columns if c != name] + if dialect == "mysql": + alterations.append(f"MODIFY COLUMN {col} {_values}") + elif dialect == "postgres": + alterations.append(f"ALTER COLUMN {col} {_values}") + elif dialect == "sqlite": + alterations.append(f"RENAME TO {col} {_values}") + else: + alterations.append(f"ADD {col} {_value}") + + # delete columns + for name in old_columns: + col = f'"{name}"' if dialect == "postgres" else f"`{name}`" + alterations.append(f"DROP COLUMN {col}") + + return pks, alterations diff --git a/dataloom/utils/create_table.py b/dataloom/utils/create_table.py index 8ef4988..db167bf 100644 --- a/dataloom/utils/create_table.py +++ b/dataloom/utils/create_table.py @@ -13,23 +13,14 @@ def get_create_table_params( model, dialect: DIALECT_LITERAL, - child_alias_name: str, - child_pk_name: str, - child_name: str, ): pks = [] user_fields = [] predefined_fields = [] - parent_pk_name = None - parent_pk_type = None - n_2_n = None - my_name = "" + for name, field in inspect.getmembers(model): if isinstance(field, PrimaryKeyColumn): pks.append(f'"{name}"' if dialect == "postgres" else f"`{name}`") - parent_pk_name = name - parent_pk_type = field.sql_type(dialect=dialect) - _values = re.sub( r"\s+", " ", @@ -84,63 +75,30 @@ def get_create_table_params( # 4. What is the relationship type being mapped? 
             pk, pk_type = field.table._get_pk_attributes(dialect=dialect)
             parent_table_name = field.table._get_table_name()
-            if field.maps_to == "N-N":
-                n_2_n = True
-                my_name = field.table.__name__.lower()
-                continue
-            else:
-                value = (
-                    "{pk_type} {unique} {nullable} REFERENCES {parent_table_name}({pk}) ON DELETE {onDelete} ON UPDATE {onUpdate}".format(
-                        onDelete=field.onDelete,
-                        onUpdate=field.onUpdate,
-                        pk_type=pk_type,
-                        parent_table_name=f'"{parent_table_name}"'
-                        if dialect == "postgres"
-                        else f"`{parent_table_name}`",
-                        pk=f'"{pk}"' if dialect == "postgres" else f"`{pk}`",
-                        nullable="NOT NULL",
-                        unique="UNIQUE" if field.maps_to == "1-1" else "",
-                    )
-                    if field.required
-                    else "{pk_type} REFERENCES {parent_table_name}({pk}) ON DELETE SET NULL".format(
-                        pk_type=pk_type,
-                        parent_table_name=f'"{parent_table_name}"'
-                        if dialect == "postgres"
-                        else f"`{parent_table_name}`",
-                        pk=f'"{pk}"' if dialect == "postgres" else f"`{pk}`",
-                    )
+            value = (
+                "{pk_type} {unique} {nullable} REFERENCES {parent_table_name}({pk}) ON DELETE {onDelete} ON UPDATE {onUpdate}".format(
+                    onDelete=field.onDelete,
+                    onUpdate=field.onUpdate,
+                    pk_type=pk_type,
+                    parent_table_name=f'"{parent_table_name}"'
+                    if dialect == "postgres"
+                    else f"`{parent_table_name}`",
+                    pk=f'"{pk}"' if dialect == "postgres" else f"`{pk}`",
+                    nullable="NOT NULL",
+                    unique="UNIQUE" if field.maps_to == "1-1" else "",
                 )
-                predefined_fields.append(
-                    (f'"{name}"' if dialect == "postgres" else f"`{name}`", value)
+                if field.required
+                else "{pk_type} REFERENCES {parent_table_name}({pk}) ON DELETE SET NULL".format(
+                    pk_type=pk_type,
+                    parent_table_name=f'"{parent_table_name}"'
+                    if dialect == "postgres"
+                    else f"`{parent_table_name}`",
+                    pk=f'"{pk}"' if dialect == "postgres" else f"`{pk}`",
                 )
+            )
+            predefined_fields.append(
+                (f'"{name}"' if dialect == "postgres" else f"`{name}`", value)
+            )
 
-    sql = None
-    if n_2_n is not None:
-        ref_name = (
-            f'"{my_name+ '_' + child_alias_name}"'
-            if dialect == "postgres"
-            else f"`{my_name+ '_' + child_alias_name}`"
-        )
-
-        child_pk_column_name = (
-            f'"{child_alias_name}_{child_pk_name.replace(r'"', '')}"'
-            if dialect == "postgres"
-            else f'`{child_alias_name}_{re.sub(r'"|`', '', child_pk_name)}`'
-        )
-        parent_pk_column_name = (
-            f'"{my_name}_{parent_pk_name}"'
-            if dialect == "postgres"
-            else f"`{my_name}_{parent_pk_name}`"
-        )
-        sql = f"""
-        CREATE TABLE IF NOT EXISTS {ref_name} (
-        {child_pk_column_name} {parent_pk_type},
-        {parent_pk_column_name} {parent_pk_type},
-        PRIMARY KEY ({parent_pk_column_name}, {child_pk_column_name}),
-        FOREIGN KEY ({parent_pk_column_name}) REFERENCES {parent_table_name}({pks[0]}),
-        FOREIGN KEY ({child_pk_column_name}) REFERENCES {child_name}({child_pk_name})
-        );
-        """
-
-    return pks, user_fields, predefined_fields, None  # sql if sql else None
+    return pks, user_fields, predefined_fields
diff --git a/hi.db b/hi.db
index ec81647..2964e14 100644
Binary files a/hi.db and b/hi.db differ
diff --git a/playground.py b/playground.py
index 1bed6b8..f240f1d 100644
--- a/playground.py
+++ b/playground.py
@@ -27,20 +27,18 @@
     sql_logger="console",
 )
 
-conn = sqlite_loom.connect()
+pg_loom = Loom(
+    connection_uri="postgresql://postgres:root@localhost:5432/hi",
+    dialect="postgres",
+    sql_logger="console",
+)
 
-# pg_loom = Loom(
-#     connection_uri="postgresql://postgres:root@localhost:5432/hi",
-#     dialect="postgres",
-#     sql_logger="console",
-# )
-
-# mysql_loom = Loom(
-#     connection_uri="mysql://root:root@localhost:3306/hi",
-#     dialect="mysql",
-#     sql_logger="console",
-# )
+mysql_loom = Loom(
+    connection_uri="mysql://root:root@localhost:3306/hi",
+    dialect="mysql",
+    sql_logger="console",
+)
 
 
 class User(Model):
@@ -48,115 +46,8 @@
     id = PrimaryKeyColumn(type="int", auto_increment=True)
     name = Column(type="text", nullable=False, default="Bob")
     username = Column(type="varchar", unique=True, length=255)
+    bio = Column(type="varchar", unique=False, length=500)
     tokenVersion = Column(type="int", default=0)
 
 
-@initialize(repr=True, to_dict=True, init=True, repr_identifier="id")
-class Profile(Model):
-    __tablename__: Optional[TableColumn] = TableColumn(name="profiles")
-    id = PrimaryKeyColumn(type="int", auto_increment=True)
-    avatar = Column(type="text", nullable=False)
-    userId = ForeignKeyColumn(
-        User,
-        maps_to="1-1",
-        type="int",
-        required=True,
-        onDelete="CASCADE",
-        onUpdate="CASCADE",
-    )
-
-
-class Post(Model):
-    __tablename__: Optional[TableColumn] = TableColumn(name="posts")
-    id = PrimaryKeyColumn(type="int", auto_increment=True, nullable=False, unique=True)
-    completed = Column(type="boolean", default=False)
-    title = Column(type="varchar", length=255, nullable=False)
-    # timestamps
-    createdAt = CreatedAtColumn()
-    # relations
-    userId = ForeignKeyColumn(
-        User,
-        maps_to="1-N",
-        type="int",
-        required=True,
-        onDelete="CASCADE",
-        onUpdate="CASCADE",
-    )
-
-
-class Category(Model):
-    __tablename__: Optional[TableColumn] = TableColumn(name="categories")
-    id = PrimaryKeyColumn(type="int", auto_increment=True, nullable=False, unique=True)
-    type = Column(type="varchar", length=255, nullable=False)
-
-    postId = ForeignKeyColumn(
-        Post,
-        maps_to="N-1",
-        type="int",
-        required=True,
-        onDelete="CASCADE",
-        onUpdate="CASCADE",
-    )
-
-
-# conn, tables = mysql_loom.connect_and_sync(
-#     [User, Profile, Post, Category], drop=True, force=True
-# )
-
-
-# userId = mysql_loom.insert_one(
-#     instance=User,
-#     values=ColumnValue(name="username", value="@miller"),
-# )
-
-# aff = mysql_loom.delete_bulk(
-#     instance=User,
-#     filters=Filter(column="id", value=1),
-# )
-# print(aff)
-
-
-# userId2 = mysql_loom.insert_one(
-#     instance=User,
-#     values=ColumnValue(name="username", value="bob"),
-# )
-
-# profileId = mysql_loom.insert_one(
-#     instance=Profile,
-#     values=[
-#         ColumnValue(name="userId", value=userId),
-#         ColumnValue(name="avatar", value="hello.jpg"),
-#     ],
-# )
-# for title in ["Hello", "Hello", "What are you doing", "Coding"]:
-#     mysql_loom.insert_one(
-#         instance=Post,
-#         values=[
-#             ColumnValue(name="userId", value=userId),
-#             ColumnValue(name="title", value=title),
-#         ],
-#     )
-
-
-# for cat in ["general", "education", "tech", "sport"]:
-#     mysql_loom.insert_one(
-#         instance=Category,
-#         values=[
-#             ColumnValue(name="postId", value=1),
-#             ColumnValue(name="type", value=cat),
-#         ],
-#     )
-
-# posts = mysql_loom.find_many(
-#     Post,
-#     select="id",
-#     filters=Filter(column="id", operator="gt", value=1),
-#     group=Group(
-#         column="id",
-#         function="MAX",
-#         having=Having(column="id", operator="in", value=(2, 3, 4)),
-#         return_aggregation_column=False,
-#     ),
-# )
-
-# print(posts)
+conn, tables = pg_loom.connect_and_sync([User], alter=True)
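
For quick manual verification, the behaviour this patch introduces can be exercised with a small script along the lines of the updated `playground.py`. The following is only a sketch, assuming the usual top-level `dataloom` exports and a local `hi` database; the credentials and the `bio` column are illustrative, not part of the patch:

```python
from typing import Optional

# assumed top-level exports, as used in the README and playground.py
from dataloom import Loom, Model, TableColumn, PrimaryKeyColumn, Column

# With this patch, Loom derives the database name from the connection URI,
# so `database=` can be omitted when `connection_uri` is given.
pg_loom = Loom(
    dialect="postgres",
    connection_uri="postgresql://postgres:root@localhost:5432/hi",
    sql_logger="console",
)


class User(Model):
    __tablename__: Optional[TableColumn] = TableColumn(name="users")
    id = PrimaryKeyColumn(type="int", auto_increment=True)
    name = Column(type="text", nullable=False, default="Bob")
    username = Column(type="varchar", unique=True, length=255)
    # a column that is new relative to the existing table: with alter=True
    # it should be added via ALTER TABLE instead of being ignored
    bio = Column(type="varchar", unique=False, length=500)


# alter=True now diffs the model against the existing table's columns
# (previously it was a no-op) and only creates the table if it is missing.
conn, tables = pg_loom.connect_and_sync([User], alter=True)
print(pg_loom.database, tables)
```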