diff --git a/bin/clear-all-heartbeats.py b/bin/clear-all-heartbeats.py index 28db7fbb7..628228f97 100755 --- a/bin/clear-all-heartbeats.py +++ b/bin/clear-all-heartbeats.py @@ -43,7 +43,7 @@ def main(host, port, database): count += 1 batch_client.execute() - print("{} heartbeats deleted!".format(count)) + print(f"{count} heartbeats deleted!") exit(0) diff --git a/bin/clear-heartbeat-status.py b/bin/clear-heartbeat-status.py index f583bfcfc..f1a0fce7c 100755 --- a/bin/clear-heartbeat-status.py +++ b/bin/clear-heartbeat-status.py @@ -25,7 +25,7 @@ def main(host, port, account_id, folder_id, device_id): print("Clearing heartbeat status...") n = clear_heartbeat_status(account_id, folder_id, device_id, host, port) - print("{} folders cleared.".format(n)) + print(f"{n} folders cleared.") exit(0) diff --git a/bin/contact-search-backfill.py b/bin/contact-search-backfill.py index 999f23dc1..ecfb8b956 100755 --- a/bin/contact-search-backfill.py +++ b/bin/contact-search-backfill.py @@ -23,7 +23,7 @@ def main(namespace_ids): maybe_enable_rollbar() for namespace_id in namespace_ids: - log.info("indexing namespace {namespace_id}".format(namespace_id=namespace_id)) + log.info(f"indexing namespace {namespace_id}") index_namespace(namespace_id) diff --git a/bin/contact-search-service.py b/bin/contact-search-service.py index aeafdd8b0..a498ebc4c 100755 --- a/bin/contact-search-service.py +++ b/bin/contact-search-service.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -""" Start the contact search indexing service. """ +"""Start the contact search indexing service.""" from gevent import monkey monkey.patch_all() diff --git a/bin/correct-autoincrements.py b/bin/correct-autoincrements.py index cd24fe5bf..017c2f3c7 100755 --- a/bin/correct-autoincrements.py +++ b/bin/correct-autoincrements.py @@ -28,14 +28,14 @@ def reset_db(dry_run): engine = engine_manager.engines[key] schema = shard["SCHEMA_NAME"] - print("Resetting invalid autoincrements for database: {}".format(schema)) + print(f"Resetting invalid autoincrements for database: {schema}") reset_tables = reset_invalid_autoincrements(engine, schema, key, dry_run) if dry_run: print("dry_run=True") if reset_tables: print("Reset tables: {}".format(", ".join(reset_tables))) else: - print("Schema {} okay".format(schema)) + print(f"Schema {schema} okay") if __name__ == "__main__": diff --git a/bin/create-db.py b/bin/create-db.py index 0b8146c2d..5772920c0 100755 --- a/bin/create-db.py +++ b/bin/create-db.py @@ -53,12 +53,12 @@ def main(target_hostname, host_ip): ) schema_name = shard["SCHEMA_NAME"] - print("Setting up database: {}".format(schema_name)) + print(f"Setting up database: {schema_name}") # Create the database IF needed. base_engine.execute( - "CREATE DATABASE IF NOT EXISTS {} DEFAULT CHARACTER " - "SET utf8mb4 DEFAULT COLLATE utf8mb4_general_ci;".format(schema_name) + f"CREATE DATABASE IF NOT EXISTS {schema_name} DEFAULT CHARACTER " + "SET utf8mb4 DEFAULT COLLATE utf8mb4_general_ci;" ) engine = engine_manager.engines[int(key)] @@ -71,9 +71,7 @@ def main(target_hostname, host_ip): assert ( current_revision ), "Need current revision in alembic_version table." 
-            print(
-                "Already revisioned by alembic version: {}".format(current_revision)
-            )
+            print(f"Already revisioned by alembic version: {current_revision}")
         else:
             # Initialize shards, stamp alembic revision
             print("Initializing database.")
@@ -81,7 +79,7 @@ def main(target_hostname, host_ip):
             alembic_ini_filename = os.environ.get("ALEMBIC_INI_PATH", "alembic.ini")
             assert os.path.isfile(
                 alembic_ini_filename
-            ), "Must have alembic.ini file at {}".format(alembic_ini_filename)
+            ), f"Must have alembic.ini file at {alembic_ini_filename}"
             alembic_cfg = alembic.config.Config(alembic_ini_filename)
             # Alembic option values need to be strings.
             alembic_cfg.set_main_option("shard_id", str(key))
diff --git a/bin/create-encryption-keys.py b/bin/create-encryption-keys.py
index 5936b68ad..1f322484f 100755
--- a/bin/create-encryption-keys.py
+++ b/bin/create-encryption-keys.py
@@ -18,8 +18,7 @@ def main():
     # If the config contains encryption keys, don't override.
     if config.get("SECRET_ENCRYPTION_KEY"):
         raise Exception(
-            "Encryption keys already present in secrets config "
-            "file {0}".format(secrets_path)
+            f"Encryption keys already present in secrets config file {secrets_path}"
         )
 
     # Generate keys
@@ -37,12 +36,10 @@ def main():
     # Update it
     try:
         with open(secrets_path, "a") as f:
-            print("Writing keys to secrets config file {0}".format(secrets_path))
+            print(f"Writing keys to secrets config file {secrets_path}")
             yaml.dump(data, f, default_flow_style=False)
-    except IOError:
-        raise Exception(
-            "Check file write permissions on config file {0}".format(secrets_path)
-        )
+    except OSError:
+        raise Exception(f"Check file write permissions on config file {secrets_path}")
 
     # Update the config dict
     config.update(data)
diff --git a/bin/delete-account-data.py b/bin/delete-account-data.py
index 271f88ff8..3bbfb4c16 100755
--- a/bin/delete-account-data.py
+++ b/bin/delete-account-data.py
@@ -42,16 +42,16 @@ def delete_account_data(account_id, dry_run, yes, throttle):
         account = db_session.query(Account).get(account_id)
 
         if not account:
-            print("Account with id {} does NOT exist.".format(account_id))
-            return
+            print(f"Account with id {account_id} does NOT exist.")
+            return None
 
         email_address = account.email_address
         namespace_id = account.namespace.id
 
         if account.sync_should_run or not account.is_marked_for_deletion:
             print(
-                "Account with id {} NOT marked for deletion.\n"
-                "Will NOT delete, goodbye.".format(account_id)
+                f"Account with id {account_id} NOT marked for deletion.\n"
+                "Will NOT delete, goodbye."
            )
             return -1
@@ -69,7 +69,7 @@ def delete_account_data(account_id, dry_run, yes, throttle):
             print("Will NOT delete, goodbye.")
             return 0
 
-    print("Deleting account with id: {}...".format(account_id))
+    print(f"Deleting account with id: {account_id}...")
     start = time.time()
 
     # Delete data in database
@@ -77,18 +77,18 @@ def delete_account_data(account_id, dry_run, yes, throttle):
         print("Deleting database data")
         delete_namespace(namespace_id, dry_run=dry_run, throttle=throttle)
     except Exception as e:
-        print("Database data deletion failed! Error: {}".format(str(e)))
+        print(f"Database data deletion failed! Error: {str(e)}")
         return -1
 
     database_end = time.time()
-    print("Database data deleted. Time taken: {}".format(database_end - start))
+    print(f"Database data deleted. Time taken: {database_end - start}")
 
     # Delete liveness data
     print("Deleting liveness data")
     clear_heartbeat_status(account_id)
 
     end = time.time()
-    print("All data deleted successfully! 
TOTAL time taken: {}".format(end - start)) + print(f"All data deleted successfully! TOTAL time taken: {end - start}") return 0 diff --git a/bin/get-accounts-for-host.py b/bin/get-accounts-for-host.py index 11fcc2b91..c67d7d851 100644 --- a/bin/get-accounts-for-host.py +++ b/bin/get-accounts-for-host.py @@ -16,7 +16,7 @@ def main(hostname): with global_session_scope() as db_session: account_ids = db_session.query(Account.id).filter(Account.sync_host == hostname) - print("Accounts being synced by {}:".format(hostname)) + print(f"Accounts being synced by {hostname}:") for account_id in account_ids: print(account_id[0]) db_session.commit() diff --git a/bin/get-id.py b/bin/get-id.py index c2b3c96dd..61926153b 100755 --- a/bin/get-id.py +++ b/bin/get-id.py @@ -43,7 +43,7 @@ def main(type, id, public_id): type = type.lower() if type not in cls_for_type: - print("Error: unknown type '{}'".format(type)) + print(f"Error: unknown type '{type}'") sys.exit(-1) cls = cls_for_type[type] diff --git a/bin/get-object.py b/bin/get-object.py index 948e95917..27cba0fa4 100755 --- a/bin/get-object.py +++ b/bin/get-object.py @@ -58,7 +58,7 @@ def main(type, id, public_id, account_id, namespace_id, readwrite): type = type.lower() if type not in cls_for_type: - print("Error: unknown type '{}'".format(type)) + print(f"Error: unknown type '{type}'") sys.exit(-1) cls = cls_for_type[type] diff --git a/bin/inbox-auth.py b/bin/inbox-auth.py index 6c1ad4f90..631c959c4 100755 --- a/bin/inbox-auth.py +++ b/bin/inbox-auth.py @@ -78,7 +78,7 @@ def main(email_address, reauth, target, provider): except NotSupportedError as e: sys.exit(str(e)) - print("OK. Authenticated account for {}".format(email_address)) + print(f"OK. Authenticated account for {email_address}") if __name__ == "__main__": diff --git a/bin/inbox-start.py b/bin/inbox-start.py index f34fb7027..69638a12f 100755 --- a/bin/inbox-start.py +++ b/bin/inbox-start.py @@ -39,7 +39,7 @@ esc = "\033" -banner = r"""{esc}[1;95m +banner = rf"""{esc}[1;95m _ _ _ | \ | | | | | \| |_ _| | __ _ ___ @@ -53,9 +53,7 @@ {esc}[0m Use CTRL-C to stop. 
- """.format( - esc=esc -) + """ @click.command() @@ -110,7 +108,7 @@ def main(prod, enable_tracer, enable_profiler, config, process_num, exit_after): total_processes = int(os.environ.get("MAILSYNC_PROCESSES", 1)) - setproctitle.setproctitle("sync-engine-{}".format(process_num)) + setproctitle.setproctitle(f"sync-engine-{process_num}") log = get_logger() log.info( @@ -132,7 +130,7 @@ def main(prod, enable_tracer, enable_profiler, config, process_num, exit_after): port = 16384 + process_num enable_profiler_api = inbox_config.get("DEBUG_PROFILING_ON") - process_identifier = "{}:{}".format(platform.node(), process_num) + process_identifier = f"{platform.node()}:{process_num}" if exit_after: exit_after = exit_after.split(":") diff --git a/bin/migrate-db.py b/bin/migrate-db.py index 1d580c4c1..1315b658f 100755 --- a/bin/migrate-db.py +++ b/bin/migrate-db.py @@ -17,7 +17,7 @@ def main(): alembic_ini_filename = os.environ.get("ALEMBIC_INI_PATH", "alembic.ini") assert os.path.isfile( alembic_ini_filename - ), "Missing alembic.ini file at {}".format(alembic_ini_filename) + ), f"Missing alembic.ini file at {alembic_ini_filename}" database_hosts = config.get_required("DATABASE_HOSTS") @@ -33,15 +33,13 @@ def main(): key = shard["ID"] try: - print("Upgrading shard_id {}".format(key)) + print(f"Upgrading shard_id {key}") alembic_cfg = alembic.config.Config(alembic_ini_filename) alembic_cfg.set_main_option("shard_id", str(key)) alembic.command.upgrade(alembic_cfg, "head") - print("Upgraded shard_id {}\n".format(key)) + print(f"Upgraded shard_id {key}\n") except alembic.util.CommandError as e: - print( - "FAILED to upgrade shard_id {} with error: {}".format(key, str(e)) - ) + print(f"FAILED to upgrade shard_id {key} with error: {str(e)}") continue diff --git a/bin/mysql-prompt.py b/bin/mysql-prompt.py index 1bd68bcb4..1bdc68bf7 100755 --- a/bin/mysql-prompt.py +++ b/bin/mysql-prompt.py @@ -40,7 +40,7 @@ def main(shard_num): for key in creds.keys(): if creds[key] is None: - print("Error: {key} is None".format(key=key)) + print(f"Error: {key} is None") sys.exit(-1) proc = subprocess.Popen( diff --git a/bin/set-desired-host.py b/bin/set-desired-host.py index b22ee6614..9ddef56eb 100644 --- a/bin/set-desired-host.py +++ b/bin/set-desired-host.py @@ -19,9 +19,9 @@ def main(account_id, desired_host, dry_run, toggle_sync): with global_session_scope() as db_session: account = db_session.query(Account).get(int(account_id)) - print("Before sync host: {}".format(account.sync_host)) - print("Before desired sync host: {}".format(account.desired_sync_host)) - print("Before sync should run: {}".format(account.sync_should_run)) + print(f"Before sync host: {account.sync_host}") + print(f"Before desired sync host: {account.desired_sync_host}") + print(f"Before sync should run: {account.sync_should_run}") if dry_run: return @@ -29,9 +29,9 @@ def main(account_id, desired_host, dry_run, toggle_sync): if toggle_sync: account.sync_should_run = not account.sync_should_run - print("After sync host: {}".format(account.sync_host)) - print("After desired sync host: {}".format(account.desired_sync_host)) - print("After sync should run: {}".format(account.sync_should_run)) + print(f"After sync host: {account.sync_host}") + print(f"After desired sync host: {account.desired_sync_host}") + print(f"After sync should run: {account.sync_should_run}") db_session.commit() diff --git a/bin/stamp-db.py b/bin/stamp-db.py index 4dca8dcd4..b846ae932 100755 --- a/bin/stamp-db.py +++ b/bin/stamp-db.py @@ -18,7 +18,7 @@ def main(revision_id): 
alembic_ini_filename = os.environ.get("ALEMBIC_INI_PATH", "alembic.ini") assert os.path.isfile( alembic_ini_filename - ), "Missing alembic.ini file at {}".format(alembic_ini_filename) + ), f"Missing alembic.ini file at {alembic_ini_filename}" database_hosts = config.get_required("DATABASE_HOSTS") @@ -34,13 +34,13 @@ def main(revision_id): key = shard["ID"] try: - print("Stamping shard_id {}".format(key)) + print(f"Stamping shard_id {key}") alembic_cfg = alembic.config.Config(alembic_ini_filename) alembic_cfg.set_main_option("shard_id", str(key)) alembic.command.stamp(alembic_cfg, revision_id) - print("Stamped shard_id {}\n".format(key)) + print(f"Stamped shard_id {key}\n") except alembic.util.CommandError as e: - print("FAILED to stamp shard_id {} with error: {}".format(key, str(e))) + print(f"FAILED to stamp shard_id {key} with error: {str(e)}") continue diff --git a/bin/syncback-service.py b/bin/syncback-service.py index 3d0a8fec2..a65648d4a 100755 --- a/bin/syncback-service.py +++ b/bin/syncback-service.py @@ -58,7 +58,7 @@ ) def main(prod, config, process_num, syncback_id, enable_tracer, enable_profiler): """Launch the actions syncback service.""" - setproctitle("syncback-{}".format(process_num)) + setproctitle(f"syncback-{process_num}") maybe_enable_rollbar() diff --git a/bin/syncback-stats.py b/bin/syncback-stats.py index 23fb36663..825b8b6f5 100755 --- a/bin/syncback-stats.py +++ b/bin/syncback-stats.py @@ -40,16 +40,10 @@ def main(): .group_by(ActionLog.namespace_id) ): print( - "{} (pending actions), {} (shard), {} (namespace)".format( - c, key, namespace_id - ) + f"{c} (pending actions), {key} (shard), {namespace_id} (namespace)" ) total_pending_actions += c - print( - "total pending actions for shard {}: {}".format( - key, total_pending_actions - ) - ) + print(f"total pending actions for shard {key}: {total_pending_actions}") if __name__ == "__main__": diff --git a/bin/unschedule-account-syncs.py b/bin/unschedule-account-syncs.py index cf12018d6..8a8e3b047 100755 --- a/bin/unschedule-account-syncs.py +++ b/bin/unschedule-account-syncs.py @@ -46,7 +46,7 @@ def main(dry_run, number, hostname, process): if process is not None: hostname = ":".join([hostname, process]) to_unschedule = db_session.query(Account.id).filter( - Account.sync_host.like("{}%".format(hostname)) + Account.sync_host.like(f"{hostname}%") ) if number: to_unschedule = to_unschedule.limit(number) diff --git a/bin/update-categories.py b/bin/update-categories.py index 3ed4b3c8d..4e27cc2e6 100755 --- a/bin/update-categories.py +++ b/bin/update-categories.py @@ -22,14 +22,14 @@ def main(shard_id): def update_categories_for_shard(shard_id): - print("Updating categories for shard {}".format(shard_id)) + print(f"Updating categories for shard {shard_id}") engine = engine_manager.engines[shard_id] query = "UPDATE category SET name='' WHERE name is NULL;" engine.execute(query) - print("Updated names, updating deleted_at for shard {}".format(shard_id)) + print(f"Updated names, updating deleted_at for shard {shard_id}") query = ( "UPDATE category SET deleted_at='1970-01-01 00:00:00' WHERE deleted_at is NULL;" @@ -38,14 +38,14 @@ def update_categories_for_shard(shard_id): def update_folders_and_labels_for_shard(shard_id): - print("Updating folders for shard {}".format(shard_id)) + print(f"Updating folders for shard {shard_id}") engine = engine_manager.engines[shard_id] query = "UPDATE folder SET canonical_name='' WHERE canonical_name is NULL;" engine.execute(query) - print("Updated folders, updating labels for shard 
{}".format(shard_id)) + print(f"Updated folders, updating labels for shard {shard_id}") query = "UPDATE label SET canonical_name='' WHERE canonical_name is NULL;" engine.execute(query) diff --git a/bin/verify-db.py b/bin/verify-db.py index b2ccc1627..e21d40046 100755 --- a/bin/verify-db.py +++ b/bin/verify-db.py @@ -24,7 +24,7 @@ def main(): engine = engine_manager.engines[key] schema = shard["SCHEMA_NAME"] - print("Verifying database: {}".format(schema)) + print(f"Verifying database: {schema}") verify_db(engine, schema, key) diff --git a/inbox/actions/backends/generic.py b/inbox/actions/backends/generic.py index 16857a6c5..381e48f3b 100644 --- a/inbox/actions/backends/generic.py +++ b/inbox/actions/backends/generic.py @@ -1,5 +1,5 @@ -""" Operations for syncing back local datastore changes to - generic IMAP providers. +"""Operations for syncing back local datastore changes to +generic IMAP providers. """ import contextlib from collections import defaultdict diff --git a/inbox/actions/backends/gmail.py b/inbox/actions/backends/gmail.py index b4514ddaf..80bf01d8f 100644 --- a/inbox/actions/backends/gmail.py +++ b/inbox/actions/backends/gmail.py @@ -1,4 +1,4 @@ -""" Operations for syncing back local datastore changes to Gmail. """ +"""Operations for syncing back local datastore changes to Gmail.""" import contextlib from imaplib import IMAP4 diff --git a/inbox/actions/base.py b/inbox/actions/base.py index 7320f1e8d..de0208b54 100644 --- a/inbox/actions/base.py +++ b/inbox/actions/base.py @@ -1,4 +1,4 @@ -""" Code for propagating Nylas datastore changes to account backends. +"""Code for propagating Nylas datastore changes to account backends. Syncback actions don't update anything in the local datastore; the Nylas datastore is updated asynchronously (see namespace.py) and bookkeeping about diff --git a/inbox/api/err.py b/inbox/api/err.py index 482ac348e..a6a4fd178 100644 --- a/inbox/api/err.py +++ b/inbox/api/err.py @@ -51,7 +51,8 @@ class APIException(Exception): class InputError(APIException): """Raised on invalid user input (missing required parameter, value too - long, etc.)""" + long, etc.) + """ status_code = 400 diff --git a/inbox/api/kellogs.py b/inbox/api/kellogs.py index 6e92aa340..5a6af5232 100644 --- a/inbox/api/kellogs.py +++ b/inbox/api/kellogs.py @@ -120,7 +120,8 @@ def _get_namespace_public_id(obj): def _format_participant_data(participant): """Event.participants is a JSON blob which may contain internal data. This function returns a dict with only the data we want to make - public.""" + public. + """ dct = {} for attribute in ["name", "status", "email", "comment"]: dct[attribute] = participant.get(attribute) diff --git a/inbox/api/ns_api.py b/inbox/api/ns_api.py index 44c6b59c5..c49724d78 100644 --- a/inbox/api/ns_api.py +++ b/inbox/api/ns_api.py @@ -631,9 +631,7 @@ def message_read_api(public_id): else: # Try getting the message from the email provider. account = g.namespace.account - statsd_string = "api.direct_fetching.{}.{}".format( - account.provider, account.id - ) + statsd_string = f"api.direct_fetching.{account.provider}.{account.id}" try: with statsd_client.timer(f"{statsd_string}.provider_latency"): @@ -685,8 +683,8 @@ def message_read_api(public_id): request.environ["log_context"]["message_id"] = message.id raise NotFoundError( - "Couldn't find raw contents for message `{}`. " - "Please try again in a few minutes.".format(public_id) + f"Couldn't find raw contents for message `{public_id}`. " + "Please try again in a few minutes." 
) return encoder.jsonify(message) @@ -926,9 +924,7 @@ def folder_label_delete_api(public_id): messages_exist = g.db_session.query(messages_with_category).scalar() if messages_exist: raise InputError( - "Folder {} cannot be deleted because it contains messages.".format( - public_id - ) + f"Folder {public_id} cannot be deleted because it contains messages." ) if g.api_features.optimistic_updates: @@ -1855,7 +1851,8 @@ def multi_send(draft_id): session. Sends a copy of the draft at draft_id to the specified address with the specified body, and ensures that a corresponding sent message is either not created in the user's Sent folder or is immediately - deleted from it.""" + deleted from it. + """ request_started = time.time() account = g.namespace.account @@ -1898,8 +1895,8 @@ def multi_send(draft_id): @app.route("/send-multiple/", methods=["DELETE"]) def multi_send_finish(draft_id): """Closes out a multi-send session by marking the sending draft as sent - and moving it to the user's Sent folder.""" - + and moving it to the user's Sent folder. + """ account = g.namespace.account if account.discriminator == "easaccount": diff --git a/inbox/api/srv.py b/inbox/api/srv.py index bbeac0191..e06767313 100644 --- a/inbox/api/srv.py +++ b/inbox/api/srv.py @@ -70,13 +70,13 @@ def auth(): or request.path.startswith("/w/") or request.path.startswith("/metrics") ): - return + return None if not request.authorization or not request.authorization.username: AUTH_ERROR_MSG = ( "Could not verify access credential.", 401, - {"WWW-Authenticate": 'Basic realm="API ' 'Access Token Required"'}, + {"WWW-Authenticate": 'Basic realm="API Access Token Required"'}, ) auth_header = request.headers.get("Authorization", None) @@ -108,7 +108,7 @@ def auth(): ( "Could not verify access credential.", 401, - {"WWW-Authenticate": 'Basic realm="API ' 'Access Token Required"'}, + {"WWW-Authenticate": 'Basic realm="API Access Token Required"'}, ) ) @@ -275,7 +275,6 @@ def modify_account(namespace_public_id): This stops syncing an account until it is explicitly resumed. """ - data = request.get_json(force=True) with global_session_scope() as db_session: @@ -335,7 +334,8 @@ def home(): @app.route("/logout") def logout(): """Utility function used to force browsers to reset cached HTTP Basic Auth - credentials""" + credentials + """ return make_response( ( ".", diff --git a/inbox/api/update.py b/inbox/api/update.py index 0aa0e72ef..4bfdcfb9c 100644 --- a/inbox/api/update.py +++ b/inbox/api/update.py @@ -109,7 +109,7 @@ def update_message_flags(message, db_session, optimistic, unread=None, starred=N def parse_folder(request_data, db_session, namespace_id): # TODO deprecate being able to post "folder" and not "folder_id" if "folder_id" not in request_data and "folder" not in request_data: - return + return None folder_public_id = request_data.pop("folder_id", None) or request_data.pop( "folder", None ) @@ -153,7 +153,7 @@ def update_message_folder(message, db_session, category, optimistic): def parse_labels(request_data, db_session, namespace_id): # TODO deprecate being able to post "labels" and not "label_ids" if "label_ids" not in request_data and "labels" not in request_data: - return + return None label_public_ids = request_data.pop("label_ids", []) or request_data.pop( "labels", [] diff --git a/inbox/api/wsgi.py b/inbox/api/wsgi.py index 9920c8788..9167e752e 100644 --- a/inbox/api/wsgi.py +++ b/inbox/api/wsgi.py @@ -18,7 +18,8 @@ class NylasWSGIHandler(WSGIHandler): """Custom WSGI handler class to customize request logging. 
Based on - gunicorn.workers.ggevent.PyWSGIHandler.""" + gunicorn.workers.ggevent.PyWSGIHandler. + """ def log_request(self): # gevent.pywsgi tries to call log.write(), but Python logger objects @@ -94,7 +95,8 @@ def handle_error(self, type, value, tb): class NylasWSGIWorker(GeventWorker): """Custom worker class for gunicorn. Based on - gunicorn.workers.ggevent.GeventPyWSGIWorker.""" + gunicorn.workers.ggevent.GeventPyWSGIWorker. + """ server_class = WSGIServer wsgi_handler = NylasWSGIHandler diff --git a/inbox/auth/oauth.py b/inbox/auth/oauth.py index 7efdd9389..8fc0d2269 100644 --- a/inbox/auth/oauth.py +++ b/inbox/auth/oauth.py @@ -133,7 +133,7 @@ def _new_access_token_from_authalligator( ) aa_account = aa_response except AccountError as exc: - log.warn( + log.warning( "AccountError during AuthAlligator account query", account_id=account.id, error_code=exc.code and exc.code.value, diff --git a/inbox/contacts/algorithms.py b/inbox/contacts/algorithms.py index 56f408470..4341ec68b 100644 --- a/inbox/contacts/algorithms.py +++ b/inbox/contacts/algorithms.py @@ -186,7 +186,7 @@ def _combine_similar_molecules(molecules_list): combined = [False] * len(molecules_list) new_guys = [] for j in range(new_guys_start_idx, len(molecules_list)): - for i in range(0, j): + for i in range(j): if combined[i]: continue (g1, m1), (g2, m2) = molecules_list[i], molecules_list[j] diff --git a/inbox/contacts/carddav.py b/inbox/contacts/carddav.py index bcee51f83..508007a30 100644 --- a/inbox/contacts/carddav.py +++ b/inbox/contacts/carddav.py @@ -52,7 +52,6 @@ def __init__(self, email_address, password, base_url): def get_principal_url(self): """Use PROPFIND method to find the `principal` carddav url""" - payload = """ diff --git a/inbox/contacts/crud.py b/inbox/contacts/crud.py index 9947116e0..03741fbbb 100644 --- a/inbox/contacts/crud.py +++ b/inbox/contacts/crud.py @@ -1,5 +1,6 @@ """Utility functions for creating, reading, updating and deleting contacts. -Called by the API.""" +Called by the API. +""" import uuid from inbox.models import Contact diff --git a/inbox/contacts/icloud.py b/inbox/contacts/icloud.py index 163c82820..86375ea08 100644 --- a/inbox/contacts/icloud.py +++ b/inbox/contacts/icloud.py @@ -105,7 +105,7 @@ def get_items(self, sync_from_dt=None, max_results=100000): # TODO add paging for requesting all self.log.error( "Error parsing CardDav response into contact: " - "{}".format(ET.tostring(refprop)) + f"{ET.tostring(refprop)}" ) continue diff --git a/inbox/contacts/processing.py b/inbox/contacts/processing.py index a1088a5f8..184b69d9f 100644 --- a/inbox/contacts/processing.py +++ b/inbox/contacts/processing.py @@ -43,7 +43,7 @@ def _get_contact_map(db_session, namespace_id, all_addresses): def _get_contact_from_map(contact_map, name, email_address): if not valid_email(email_address): - return + return None canonicalized_address = canonicalize(email_address) contact = contact_map.get(canonicalized_address) diff --git a/inbox/contacts/search.py b/inbox/contacts/search.py index c4f87b980..076ccda1f 100644 --- a/inbox/contacts/search.py +++ b/inbox/contacts/search.py @@ -93,8 +93,8 @@ def __init__(self, namespace_id): def _fetch_search_page(self, **kwargs): """Make sure we always filter results by namespace and apply the - correct query options.""" - + correct query options. 
+        correct query options.
+        """
         namespace_filter = f"(and namespace_id:{self.namespace_id})"
         if "query" not in kwargs:
             kwargs["query"] = namespace_filter
diff --git a/inbox/contacts/vcard.py b/inbox/contacts/vcard.py
index fc8d37432..72d12b101 100644
--- a/inbox/contacts/vcard.py
+++ b/inbox/contacts/vcard.py
@@ -35,13 +35,12 @@
 
 
 def list_clean(string):
-    """transforms a comma seperated string to a list, stripping whitespaces
+    """Transforms a comma-separated string to a list, stripping whitespaces
     "HOME, WORK,pref" -> ['HOME', 'WORK', 'pref']
 
     string: string of comma seperated elements
     returns: list()
 
     """
-
     string = string.split(",")
     rstring = list()
     for element in string:
@@ -144,7 +143,7 @@ def get_names(display_name):
 
 
 def fix_vobject(vcard):
-    """trying to fix some more or less common errors in vcards
+    """Trying to fix some more or less common errors in vcards
 
     for now only missing FN properties are handled (and reconstructed from N)
     :type vcard: vobject.base.Component (vobject based vcard)
@@ -270,7 +269,7 @@ def alt_keys(self):
         return keylist
 
     def print_email(self):
-        """prints only name, email and type for use with mutt"""
+        """Prints only name, email and type for use with mutt"""
         collector = list()
         try:
             for one in self["EMAIL"]:
@@ -284,7 +283,7 @@ def print_email(self):
         return ""
 
     def print_tel(self):
-        """prints only name, email and type for use with mutt"""
+        """Prints only name, telephone number and type for use with mutt"""
         collector = list()
         try:
             for one in self["TEL"]:
@@ -332,16 +331,18 @@ def _line_helper(self, line):
 
     @property
     def vcf(self):
-        """serialize to VCARD as specified in RFC2426,
+        """Serialize to VCARD as specified in RFC2426,
         if no UID is specified yet, one will be added (as a UID is mandatory
         for carddav as specified in RFC6352
-        TODO make shure this random uid is unique"""
+        TODO make sure this random uid is unique
+        """
         import random
         import string
 
         def generate_random_uid():
-            """generate a random uid, when random isn't broken, getting a
-            random UID from a pool of roughly 10^56 should be good enough"""
+            """Generate a random uid, when random isn't broken, getting a
+            random UID from a pool of roughly 10^56 should be good enough
+            """
             choice = string.ascii_uppercase + string.digits
             return "".join([random.choice(choice) for _ in range(36)])
 
diff --git a/inbox/crispin.py b/inbox/crispin.py
index d2572de2a..6ebd92bf2 100644
--- a/inbox/crispin.py
+++ b/inbox/crispin.py
@@ -1,4 +1,4 @@
-""" IMAPClient wrapper for the Nylas Sync Engine."""
+"""IMAPClient wrapper for the Nylas Sync Engine."""
 import contextlib
 import datetime
 import imaplib
@@ -464,7 +464,6 @@ def _fetch_folder_list(self) -> List[Tuple[Tuple[bytes, ...], bytes, str]]:
         IMAPClient parses this response into a list of (flags, delimiter, name)
         tuples.
         """
-
         # As discovered in the wild list_folders() can return None as name,
         # we cannot handle those folders anyway so just filter them out.
         return [
@@ -656,7 +655,7 @@ def folders(self) -> List[RawFolder]:
         Fetch the list of folders for the account from the remote, return as a
         list of RawFolder objects.
 
-        NOTE:
+        Note:
             Always fetches the list of folders from the remote.
 
         """
@@ -709,7 +708,6 @@ def _get_missing_roles(
     Returns:
         a list of roles that did not appear as a role in folders
     """
-
     assert len(folders) > 0
     assert len(roles) > 0
@@ -1110,7 +1108,8 @@ def logout(self):
     def idle(self, timeout):
         """Idle for up to `timeout` seconds. Make sure we take the connection
         back out of idle mode so that we can reuse this connection in another
-        context."""
+        context.
+ """ self.conn.idle() try: r = self.conn.idle_check(timeout) @@ -1419,7 +1418,6 @@ def delete_draft(self, message_id_header): Leaves the Trash folder selected at the end of the method. """ - log.info("Trying to delete gmail draft", message_id_header=message_id_header) drafts_folder_name = self.folder_names()["drafts"][0] trash_folder_name = self.folder_names()["trash"][0] diff --git a/inbox/error_handling.py b/inbox/error_handling.py index b9e3f5f8a..3865ec1a9 100644 --- a/inbox/error_handling.py +++ b/inbox/error_handling.py @@ -31,7 +31,7 @@ def emit(self, record): "Uncaught error thrown by Flask/Werkzeug", "SyncbackWorker caught exception", ): - return + return None record.payload_data = {"fingerprint": event, "title": event} diff --git a/inbox/events/actions/backends/gmail.py b/inbox/events/actions/backends/gmail.py index 1e758e78d..b73991de6 100644 --- a/inbox/events/actions/backends/gmail.py +++ b/inbox/events/actions/backends/gmail.py @@ -1,4 +1,4 @@ -""" Operations for syncing back local Calendar changes to Gmail. """ +"""Operations for syncing back local Calendar changes to Gmail.""" from inbox.events.google import GoogleEventsProvider diff --git a/inbox/events/google.py b/inbox/events/google.py index 0c3cb9cf9..8ebf0a00e 100644 --- a/inbox/events/google.py +++ b/inbox/events/google.py @@ -56,7 +56,6 @@ def sync_calendars(self) -> CalendarSyncResponse: """ Fetch data for the user's calendars. """ - deletes = [] updates = [] items = self._get_raw_calendars() diff --git a/inbox/events/ical.py b/inbox/events/ical.py index 704cff9a2..f90619b96 100644 --- a/inbox/events/ical.py +++ b/inbox/events/ical.py @@ -773,9 +773,7 @@ def send_rsvp(ical_data, event, body_text, status, account): if status == "yes": msg.headers["Subject"] = f"Accepted: {event.message.subject}" elif status == "maybe": - msg.headers["Subject"] = "Tentatively accepted: {}".format( - event.message.subject - ) + msg.headers["Subject"] = f"Tentatively accepted: {event.message.subject}" elif status == "no": msg.headers["Subject"] = f"Declined: {event.message.subject}" diff --git a/inbox/events/microsoft/events_provider.py b/inbox/events/microsoft/events_provider.py index 15357b87f..b8c20da61 100644 --- a/inbox/events/microsoft/events_provider.py +++ b/inbox/events/microsoft/events_provider.py @@ -217,7 +217,6 @@ def webhook_notifications_enabled(self, account: Account) -> bool: * https://learn.microsoft.com/en-us/answers/questions/417261/error-on-adding-subscription-on-events-using-ms-gr.html * https://stackoverflow.com/questions/65030751/ms-graph-adding-subscription-returns-extensionerror-and-serviceunavailable """ - # First check if we already have cached value since this function is called # repeatedly and there is no need to do extra HTTP request every time. if self._webhook_notifications_enabled is not None: diff --git a/inbox/events/microsoft/graph_client.py b/inbox/events/microsoft/graph_client.py index ad3add2ca..50923f6a9 100644 --- a/inbox/events/microsoft/graph_client.py +++ b/inbox/events/microsoft/graph_client.py @@ -224,7 +224,6 @@ def get_event( The event. https://learn.microsoft.com/en-us/graph/api/resources/event """ - params = {} if fields: params["$select"] = ",".join(fields) @@ -324,7 +323,6 @@ def subscribe( The subscription. https://learn.microsoft.com/en-us/graph/api/resources/subscription """ - if resource_url.startswith(BASE_URL): resource_url = resource_url[len(BASE_URL) :] @@ -449,7 +447,6 @@ def subscribe_to_event_changes( The subscription. 
https://learn.microsoft.com/en-us/graph/api/resources/subscription """ - return self.subscribe( resource_url=f"/me/calendars/{calendar_id}/events", change_types=[ChangeType.CREATED, ChangeType.UPDATED, ChangeType.DELETED], diff --git a/inbox/events/recurring.py b/inbox/events/recurring.py index a23e94919..dcfa38253 100644 --- a/inbox/events/recurring.py +++ b/inbox/events/recurring.py @@ -129,7 +129,7 @@ def get_start_times(event, start=None, end=None): rrules = parse_rrule(event) if not rrules: - log.warn("Tried to expand a non-recurring event", event_id=event.id) + log.warning("Tried to expand a non-recurring event", event_id=event.id) return [event.start] excl_dates = parse_exdate(event) diff --git a/inbox/events/remote_sync.py b/inbox/events/remote_sync.py index ec055439e..0d8c0680d 100644 --- a/inbox/events/remote_sync.py +++ b/inbox/events/remote_sync.py @@ -410,7 +410,6 @@ def _delete_calendar(db_session: Any, calendar: Calendar) -> None: processing (Transaction record creation) blocking the event loop. """ - # load ids first to save memory event_ids = [ event_id diff --git a/inbox/events/util.py b/inbox/events/util.py index dc161c06d..ba3b72bb0 100644 --- a/inbox/events/util.py +++ b/inbox/events/util.py @@ -85,7 +85,8 @@ def valid_base36(uid): def removed_participants(original_participants, update_participants): """Returns the name and addresses of the participants which have been - removed.""" + removed. + """ original_table = { part["email"].lower(): part.get("name") for part in original_participants diff --git a/inbox/heartbeat/store.py b/inbox/heartbeat/store.py index 1890d08fb..649bae78f 100644 --- a/inbox/heartbeat/store.py +++ b/inbox/heartbeat/store.py @@ -96,7 +96,8 @@ def clear(self): class HeartbeatStore: """Store that proxies requests to Redis with handlers that also - update indexes and handle scanning through results.""" + update indexes and handle scanning through results. + """ _instances: Dict[Optional[str], "HeartbeatStore"] = {} diff --git a/inbox/ignition.py b/inbox/ignition.py index 531553f32..ddec933bd 100644 --- a/inbox/ignition.py +++ b/inbox/ignition.py @@ -206,11 +206,7 @@ def init_db(engine, key=0): event.listen( table, "after_create", - DDL( - "ALTER TABLE {tablename} AUTO_INCREMENT={increment}".format( - tablename=table, increment=increment - ) - ), + DDL(f"ALTER TABLE {table} AUTO_INCREMENT={increment}"), ) with disabled_dubiously_many_queries_warning(): MailSyncBase.metadata.create_all(engine) @@ -260,9 +256,7 @@ def reset_invalid_autoincrements(engine, schema, key, dry_run=True): increment = engine.execute(query.format(schema, table)).scalar() if increment is not None and (increment >> 48) != key: if not dry_run: - reset_query = "ALTER TABLE {} AUTO_INCREMENT={}".format( - table, (key << 48) + 1 - ) + reset_query = f"ALTER TABLE {table} AUTO_INCREMENT={(key << 48) + 1}" engine.execute(reset_query) reset.add(str(table)) return reset diff --git a/inbox/instrumentation.py b/inbox/instrumentation.py index 9ad2fddc3..e472f97e9 100644 --- a/inbox/instrumentation.py +++ b/inbox/instrumentation.py @@ -27,7 +27,8 @@ class ProfileCollector: """A simple stack sampler for low-overhead CPU profiling: samples the call stack every `interval` seconds and keeps track of counts by frame. Because - this uses signals, it only works on the main thread.""" + this uses signals, it only works on the main thread. 
+    this uses signals, it only works on the main thread.
+ """ def __init__(self, interval=0.005): self.interval = interval diff --git a/inbox/logging.py b/inbox/logging.py index 151512661..26bb5f06f 100644 --- a/inbox/logging.py +++ b/inbox/logging.py @@ -53,14 +53,16 @@ def find_first_app_frame_and_name(ignores=None): def _record_level(logger, name, event_dict): """Processor that records the log level ('info', 'warning', etc.) in the - structlog event dictionary.""" + structlog event dictionary. + """ event_dict["level"] = name return event_dict def _record_module(logger, name, event_dict): """Processor that records the module and line where the logging call was - invoked.""" + invoked. + """ f, name = find_first_app_frame_and_name( ignores=[ "structlog", @@ -78,7 +80,8 @@ def _record_module(logger, name, event_dict): def safe_format_exception(etype, value, tb, limit=None): """Similar to structlog._format_exception, but truncate the exception part. This is because SQLAlchemy exceptions can sometimes have ludicrously large - exception strings.""" + exception strings. + """ if tb: list = ["Traceback (most recent call last):\n"] list = list + traceback.format_tb(tb, limit) diff --git a/inbox/mailsync/backends/gmail.py b/inbox/mailsync/backends/gmail.py index 2583e6d9c..975f6a6d1 100644 --- a/inbox/mailsync/backends/gmail.py +++ b/inbox/mailsync/backends/gmail.py @@ -423,7 +423,8 @@ def __deduplicate_message_object_creation(self, db_session, raw_messages, accoun def add_message_to_thread(self, db_session, message_obj, raw_message): """Associate message_obj to the right Thread object, creating a new thread if necessary. We rely on Gmail's threading as defined by - X-GM-THRID instead of our threading algorithm.""" + X-GM-THRID instead of our threading algorithm. + """ # NOTE: g_thrid == g_msgid on the first message in the thread message_obj.g_msgid = raw_message.g_msgid message_obj.g_thrid = raw_message.g_thrid @@ -438,7 +439,7 @@ def download_and_commit_uids(self, crispin_client, uids): start = datetime.utcnow() raw_messages = crispin_client.uids(uids) if not raw_messages: - return + return None new_uids = set() with self.syncmanager_lock, session_scope(self.namespace_id) as db_session: account = Account.get(self.account_id, db_session) diff --git a/inbox/mailsync/backends/imap/generic.py b/inbox/mailsync/backends/imap/generic.py index eef52fd60..664b7764a 100644 --- a/inbox/mailsync/backends/imap/generic.py +++ b/inbox/mailsync/backends/imap/generic.py @@ -602,7 +602,8 @@ def _count_thread_messages(self, thread_id, db_session): def add_message_to_thread(self, db_session, message_obj, raw_message): """Associate message_obj to the right Thread object, creating a new - thread if necessary.""" + thread if necessary. + """ with db_session.no_autoflush: # Disable autoflush so we don't try to flush a message with null # thread_id. @@ -988,8 +989,6 @@ def uidvalidity_cb(self, account_id, folder_name, select_info): class UidInvalid(Exception): """Raised when a folder's UIDVALIDITY changes, requiring a resync.""" - pass - # This version is elsewhere in the codebase, so keep it for now # TODO(emfree): clean this up. 
@@ -1009,9 +1008,7 @@ def uidvalidity_cb( ) if not is_valid: raise UidInvalid( - "folder: {}, remote uidvalidity: {}, " - "cached uidvalidity: {}".format( - folder_name, selected_uidvalidity, saved_uidvalidity - ) + f"folder: {folder_name}, remote uidvalidity: {selected_uidvalidity}, " + f"cached uidvalidity: {saved_uidvalidity}" ) return select_info diff --git a/inbox/mailsync/frontend.py b/inbox/mailsync/frontend.py index 5b2aea14a..f9934da9a 100644 --- a/inbox/mailsync/frontend.py +++ b/inbox/mailsync/frontend.py @@ -11,7 +11,8 @@ class ProfilingHTTPFrontend: """This is a lightweight embedded HTTP server that runs inside a mailsync or syncback process. It allows you to programmatically interact with the process: to get profile/memory/load metrics, or to schedule new account - syncs.""" + syncs. + """ def __init__(self, port, trace_greenlets, profile): self.port = port @@ -108,4 +109,3 @@ def build_metadata(): class _QuietHandler(WSGIRequestHandler): def log_request(self, *args, **kwargs): """Suppress request logging so as not to pollute application logs.""" - pass diff --git a/inbox/mailsync/service.py b/inbox/mailsync/service.py index a3b6b9409..45a248628 100644 --- a/inbox/mailsync/service.py +++ b/inbox/mailsync/service.py @@ -200,9 +200,7 @@ def poll(self): # Determine which accounts to sync start_accounts = self.account_ids_to_sync() statsd_client.gauge( - "mailsync.account_counts.{}.mailsync-{}.count".format( - self.host, self.process_number - ), + f"mailsync.account_counts.{self.host}.mailsync-{self.process_number}.count", len(start_accounts), ) @@ -377,7 +375,6 @@ def stop_sync(self, account_id): If that account doesn't exist, does nothing. """ - with self.semaphore: self.log.info("Stopping monitors", account_id=account_id) if account_id in self.email_sync_monitors: diff --git a/inbox/models/backends/imap.py b/inbox/models/backends/imap.py index b584cad87..d28ee36ea 100644 --- a/inbox/models/backends/imap.py +++ b/inbox/models/backends/imap.py @@ -125,7 +125,7 @@ class ImapUid(MailSyncBase, UpdatedAtMixin, DeletedAtMixin): extra_flags = Column(LittleJSON, default=[], nullable=False) # labels (Gmail-specific) # TO BE DEPRECATED - g_labels = Column(JSON, default=lambda: [], nullable=True) + g_labels = Column(JSON, default=list, nullable=True) def update_flags(self, new_flags: List[bytes]) -> None: """ diff --git a/inbox/models/backends/oauth.py b/inbox/models/backends/oauth.py index a53863270..3dec74500 100644 --- a/inbox/models/backends/oauth.py +++ b/inbox/models/backends/oauth.py @@ -189,6 +189,7 @@ def new_token( Args: force_refresh (bool): Whether a token refresh should be forced when requesting it from an external token service (AuthAlligator) + Returns: A tuple with the new access token and its expiration. 
diff --git a/inbox/models/category.py b/inbox/models/category.py index be12f732d..4e7213bcf 100644 --- a/inbox/models/category.py +++ b/inbox/models/category.py @@ -115,8 +115,8 @@ def find_or_create(cls, session, namespace_id, name, display_name, type_): obj.name = name else: log.error( - "Duplicate category rows for namespace_id {}, " - "name {}, display_name: {}".format(namespace_id, name, display_name) + f"Duplicate category rows for namespace_id {namespace_id}, " + f"name {name}, display_name: {display_name}" ) raise MultipleResultsFound( "Duplicate category rows for namespace_id {}, name {}, " diff --git a/inbox/models/event.py b/inbox/models/event.py index 576427fc8..0c9a2d6dd 100644 --- a/inbox/models/event.py +++ b/inbox/models/event.py @@ -247,7 +247,6 @@ def _partial_participants_merge(self, event): It would be very wrong to call this method to merge, say, Google Events participants because they handle the merging themselves. """ - # We have to jump through some hoops because a participant may # not have an email or may not have a name, so we build a hash # where we can find both. Also note that we store names in the @@ -334,7 +333,7 @@ def recurring(self): r = [r] return r except (ValueError, SyntaxError): - log.warn( + log.warning( "Invalid RRULE entry for event", event_id=self.id, raw_rrule=self.recurrence, @@ -392,7 +391,7 @@ def calendar_event_link(self): try: return json.loads(self.raw_data)["htmlLink"] except (ValueError, KeyError): - return + return None @property def emails_from_description(self): @@ -426,8 +425,8 @@ def create(cls, **kwargs): def __init__(self, **kwargs): if ( - not kwargs.pop("__event_created_sanely", None) - is _EVENT_CREATED_SANELY_SENTINEL + kwargs.pop("__event_created_sanely", None) + is not _EVENT_CREATED_SANELY_SENTINEL ): raise AssertionError( "Use Event.create with appropriate keyword args " @@ -629,7 +628,7 @@ def update(self, master): def insert_warning(mapper, connection, target): - log.warn(f"InflatedEvent {target} shouldn't be committed") + log.warning(f"InflatedEvent {target} shouldn't be committed") raise Exception("InflatedEvent should not be committed") diff --git a/inbox/models/folder.py b/inbox/models/folder.py index 70195ba8b..863277b19 100644 --- a/inbox/models/folder.py +++ b/inbox/models/folder.py @@ -99,11 +99,7 @@ def find_or_create(cls, session, account, name, role=None): ) session.add(obj) except MultipleResultsFound: - log.info( - "Duplicate folder rows for name {}, account_id {}".format( - name, account.id - ) - ) + log.info(f"Duplicate folder rows for name {name}, account_id {account.id}") raise return obj diff --git a/inbox/models/message.py b/inbox/models/message.py index ad2bb4a70..34aa5b136 100644 --- a/inbox/models/message.py +++ b/inbox/models/message.py @@ -130,12 +130,12 @@ def thread(self, value): thread = synonym("_thread", descriptor=thread) - from_addr = Column(JSON, nullable=False, default=lambda: []) + from_addr = Column(JSON, nullable=False, default=list) sender_addr = Column(JSON, nullable=True) - reply_to = Column(JSON, nullable=True, default=lambda: []) - to_addr = Column(JSON, nullable=False, default=lambda: []) - cc_addr = Column(JSON, nullable=False, default=lambda: []) - bcc_addr = Column(JSON, nullable=False, default=lambda: []) + reply_to = Column(JSON, nullable=True, default=list) + to_addr = Column(JSON, nullable=False, default=list) + cc_addr = Column(JSON, nullable=False, default=list) + bcc_addr = Column(JSON, nullable=False, default=list) in_reply_to = Column(JSON, nullable=True) # From: 
http://tools.ietf.org/html/rfc4130, section 5.3.3, # max message_id_header is 998 characters @@ -221,8 +221,8 @@ def regenerate_nylas_uid(self) -> None: The value of nylas_uid is simply the draft public_id and version, concatenated. Because the nylas_uid identifies the draft on the remote provider, we regenerate it on each draft revision so that we can delete - the old draft and add the new one on the remote.""" - + the old draft and add the new one on the remote. + """ from inbox.sendmail.message import generate_message_id_header self.nylas_uid = f"{self.public_id}-{self.version}" diff --git a/inbox/models/roles.py b/inbox/models/roles.py index 1d853ae54..b6071de45 100644 --- a/inbox/models/roles.py +++ b/inbox/models/roles.py @@ -48,9 +48,7 @@ def data(self): message = self.parts[0].message # only grab one account = message.namespace.account - statsd_string = "api.direct_fetching.{}.{}".format( - account.provider, account.id - ) + statsd_string = f"api.direct_fetching.{account.provider}.{account.id}" # Try to fetch the message from S3 first. with statsd_client.timer(f"{statsd_string}.blockstore_latency"): diff --git a/inbox/models/session.py b/inbox/models/session.py index f04ccae76..b836ef165 100644 --- a/inbox/models/session.py +++ b/inbox/models/session.py @@ -119,7 +119,6 @@ def after_flush(session, flush_context): bump_redis_txn_id(session) except Exception: log.exception("bump_redis_txn_id exception") - pass create_revisions(session) return session @@ -177,7 +176,7 @@ def session_scope(id_, versioned=True): session.rollback() raise except OperationalError: - log.warn( + log.warning( "Encountered OperationalError on rollback", original_exception=type(exc) ) raise exc diff --git a/inbox/models/util.py b/inbox/models/util.py index 2e41ebd58..f7eab3141 100644 --- a/inbox/models/util.py +++ b/inbox/models/util.py @@ -166,7 +166,6 @@ def delete_namespace(namespace_id, throttle=False, dry_run=False): Raises AccountDeletionErrror with message if there are problems """ - with session_scope(namespace_id) as db_session: try: account = ( @@ -296,13 +295,11 @@ def _batch_delete( if table in ("message", "block"): query = "" else: - query = "DELETE FROM {} WHERE {}={} LIMIT {};".format( - table, column, id_, CHUNK_SIZE - ) + query = f"DELETE FROM {table} WHERE {column}={id_} LIMIT {CHUNK_SIZE};" log.info("deleting", account_id=account_id, table=table) - for _ in range(0, batches): + for _ in range(batches): if throttle: bulk_throttle() @@ -405,12 +402,12 @@ def purge_transactions( offset = 0 query = ( "SELECT id FROM transaction where created_at < " - "DATE_SUB({}, INTERVAL {} day) LIMIT {}".format(start, days_ago, limit) + f"DATE_SUB({start}, INTERVAL {days_ago} day) LIMIT {limit}" ) else: query = ( - "DELETE FROM transaction where created_at < DATE_SUB({}," - " INTERVAL {} day) LIMIT {}".format(start, days_ago, limit) + f"DELETE FROM transaction where created_at < DATE_SUB({start}," + f" INTERVAL {days_ago} day) LIMIT {limit}" ) try: # delete from rows until there are no more rows affected diff --git a/inbox/s3/backends/gmail.py b/inbox/s3/backends/gmail.py index b6cc76af6..f0a7bdb72 100644 --- a/inbox/s3/backends/gmail.py +++ b/inbox/s3/backends/gmail.py @@ -29,9 +29,7 @@ def get_gmail_raw_contents(message): g_msgid = int(g_msgid) hex_id = format(g_msgid, "x") - url = "https://www.googleapis.com/gmail/v1/users/me/messages/{}?format=raw".format( - hex_id - ) + url = f"https://www.googleapis.com/gmail/v1/users/me/messages/{hex_id}?format=raw" r = requests.get(url, 
auth=OAuthRequestsWrapper(auth_token)) if r.status_code != 200: diff --git a/inbox/s3/exc.py b/inbox/s3/exc.py index e2393773c..7983da649 100644 --- a/inbox/s3/exc.py +++ b/inbox/s3/exc.py @@ -9,11 +9,8 @@ class EmailFetchException(S3Exception): class EmailDeletedException(EmailFetchException): """Raises an error when the message is deleted on the remote.""" - pass - class TemporaryEmailFetchException(EmailFetchException): """A class for temporary errors when trying to fetch emails. - Exchange notably seems to need warming up before fetching data.""" - - pass + Exchange notably seems to need warming up before fetching data. + """ diff --git a/inbox/search/backends/imap.py b/inbox/search/backends/imap.py index de29709a3..6ddb05d22 100644 --- a/inbox/search/backends/imap.py +++ b/inbox/search/backends/imap.py @@ -51,7 +51,9 @@ def _open_crispin_connection(self, db_session): try: acct_provider_info = provider_info(account.provider) except NotSupportedError: - self.log.warn("Account provider not supported", provider=account.provider) + self.log.warning( + "Account provider not supported", provider=account.provider + ) raise self.crispin_client = CrispinClient( @@ -199,7 +201,7 @@ def _search_folder(self, folder, criteria, charset): try: self.crispin_client.select_folder(folder.name, uidvalidity_cb) except FolderMissingError: - self.log.warn("Won't search missing IMAP folder", exc_info=True) + self.log.warning("Won't search missing IMAP folder", exc_info=True) return [] except UidInvalid: self.log.error( @@ -210,7 +212,7 @@ def _search_folder(self, folder, criteria, charset): try: uids = self.crispin_client.conn.search(criteria, charset=charset) except IMAP4.error: - self.log.warn("Search error", exc_info=True) + self.log.warning("Search error", exc_info=True) raise SearchBackendException( ("Unknown IMAP error when performing search."), 503 ) diff --git a/inbox/search/base.py b/inbox/search/base.py index 07aaff055..c3a6e67bd 100644 --- a/inbox/search/base.py +++ b/inbox/search/base.py @@ -9,7 +9,8 @@ def get_search_client(account): class SearchBackendException(Exception): """Raised if there's an error proxying the search request to the - provider.""" + provider. + """ def __init__(self, message, http_code, server_error=None): self.message = message @@ -24,7 +25,8 @@ def __str__(self): class SearchStoreException(Exception): """Raised if there's an error proxying the search request to the provider. This is a special EAS case where the Status code for the Store element has - an error""" + an error + """ def __init__(self, err_code): self.err_code = err_code diff --git a/inbox/security/oracles.py b/inbox/security/oracles.py index d43fa4800..dd6598c1a 100644 --- a/inbox/security/oracles.py +++ b/inbox/security/oracles.py @@ -151,7 +151,6 @@ def decrypt(self, ciphertext, encryption_scheme): Returns the plaintext as bytes. 
""" - if self._closed: raise ValueError("Connection to crypto oracle already closed") diff --git a/inbox/sendmail/base.py b/inbox/sendmail/base.py index 1f8d82468..3a1e078c1 100644 --- a/inbox/sendmail/base.py +++ b/inbox/sendmail/base.py @@ -55,14 +55,10 @@ def create_draft_from_mime( ) -> Message: our_uid = generate_public_id() # base-36 encoded string new_headers = ( - ( - "X-INBOX-ID: {0}-0\r\n" - "Message-Id: <{0}-0@mailer.nylas.com>\r\n" - "User-Agent: NylasMailer/{1}\r\n" - ) - .format(our_uid, VERSION) - .encode() - ) + f"X-INBOX-ID: {our_uid}-0\r\n" + f"Message-Id: <{our_uid}-0@mailer.nylas.com>\r\n" + f"User-Agent: NylasMailer/{VERSION}\r\n" + ).encode() new_body = new_headers + raw_mime with db_session.no_autoflush: @@ -117,8 +113,8 @@ def block_to_part(block, message, namespace): def create_message_from_json(data, namespace, db_session, is_draft): """Construct a Message instance from `data`, a dictionary representing the POST body of an API request. All new objects are added to the session, but - not committed.""" - + not committed. + """ # Validate the input and get referenced objects (thread, attachments) # as necessary. to_addr = get_recipients(data.get("to"), "to") @@ -382,7 +378,8 @@ def generate_attachments(message, blocks): def _set_reply_headers(new_message, previous_message): """When creating a draft in reply to a thread, set the In-Reply-To and - References headers appropriately, if possible.""" + References headers appropriately, if possible. + """ if previous_message.message_id_header: new_message.in_reply_to = previous_message.message_id_header if previous_message.references: diff --git a/inbox/sendmail/message.py b/inbox/sendmail/message.py index 4ad87f856..9a297914d 100644 --- a/inbox/sendmail/message.py +++ b/inbox/sendmail/message.py @@ -214,7 +214,6 @@ def add_nylas_headers(msg, nylas_uid): public_id of the message object. """ - # Set our own custom header for tracking in `Sent Mail` folder msg.headers["X-INBOX-ID"] = nylas_uid msg.headers["Message-Id"] = generate_message_id_header(nylas_uid) diff --git a/inbox/sendmail/smtp/postel.py b/inbox/sendmail/smtp/postel.py index 4434a81ac..6ae60cf44 100644 --- a/inbox/sendmail/smtp/postel.py +++ b/inbox/sendmail/smtp/postel.py @@ -107,7 +107,6 @@ def _substitute_bcc(raw_message: bytes) -> bytes: """ Substitute BCC in raw message. """ - bcc_regexp = re.compile(rb"^Bcc: [^\r\n]*\r\n", re.IGNORECASE | re.MULTILINE) return bcc_regexp.sub(b"", raw_message) @@ -201,9 +200,9 @@ def _smtp_oauth2_try_refresh(self): ) def _try_xoauth2(self): - auth_string = "user={}\1auth=Bearer {}\1\1".format( - self.email_address, self.auth_token - ).encode() + auth_string = ( + f"user={self.email_address}\1auth=Bearer {self.auth_token}\1\1".encode() + ) code, resp = self.connection.docmd( "AUTH", f"XOAUTH2 {base64.b64encode(auth_string).decode()}" ) diff --git a/inbox/sqlalchemy_ext/util.py b/inbox/sqlalchemy_ext/util.py index 858ff0d8b..60a0a5f73 100644 --- a/inbox/sqlalchemy_ext/util.py +++ b/inbox/sqlalchemy_ext/util.py @@ -69,7 +69,8 @@ def before_commit(conn): class ABCMixin: """Use this if you want a mixin class which is actually an abstract base class, for example in order to enforce that concrete subclasses define - particular methods or properties.""" + particular methods or properties. 
+ """ __abstract__ = True diff --git a/inbox/transactions/actions.py b/inbox/transactions/actions.py index f27516cd0..a7cf36ecb 100644 --- a/inbox/transactions/actions.py +++ b/inbox/transactions/actions.py @@ -147,7 +147,7 @@ def __init__( and key % total_processes == process_number ] else: - self.log.warn( + self.log.warning( "No shards assigned to syncback server", syncback_id=syncback_id ) self.keys = [] @@ -307,7 +307,7 @@ def _get_batch_task(self, db_session, log_entries, has_more): valid log entries. """ if not log_entries: - return + return None namespace = log_entries[0].namespace account_id = namespace.account.id semaphore = self.account_semaphores[account_id] @@ -414,13 +414,13 @@ def _batch_log_entries(self, db_session, log_entries): action_log_id=log_entry.id, retries=log_entry.retries, ) - return + return None valid_log_entries.append(log_entry) batch_task = self._get_batch_task(db_session, valid_log_entries, has_more) if not batch_task: - return + return None for task in batch_task.tasks: self.running_action_ids.update(task.action_log_ids) self.log.debug( diff --git a/inbox/transactions/delta_sync.py b/inbox/transactions/delta_sync.py index 7e944ac6e..99e9a0885 100644 --- a/inbox/transactions/delta_sync.py +++ b/inbox/transactions/delta_sync.py @@ -277,6 +277,7 @@ def streaming_change_generator( """ Poll the transaction log for the given `namespace_id` until `timeout` expires, and yield each time new entries are detected. + Arguments --------- namespace_id: int diff --git a/inbox/transactions/search.py b/inbox/transactions/search.py index 093cdfab8..7e45674cd 100644 --- a/inbox/transactions/search.py +++ b/inbox/transactions/search.py @@ -75,7 +75,7 @@ def _set_transaction_pointers(self): self.transaction_pointers[key] = 0 def _index_transactions(self, namespace_ids=None): - """index with filter""" + """Index with filter""" # index 'em for key in engine_manager.engines: shard_should_sleep = [] diff --git a/inbox/util/__init__.py b/inbox/util/__init__.py index f1000f72e..1f9e36ac7 100644 --- a/inbox/util/__init__.py +++ b/inbox/util/__init__.py @@ -1,6 +1,6 @@ -""" Non-server-specific utility modules. These shouldn't depend on any code - from the inbox module tree! +"""Non-server-specific utility modules. These shouldn't depend on any code +from the inbox module tree! - Don't add new code here! Find the relevant submodule, or use misc.py if - there's really no other place. +Don't add new code here! Find the relevant submodule, or use misc.py if +there's really no other place. """ diff --git a/inbox/util/blockstore.py b/inbox/util/blockstore.py index dd4011bcd..079b40c8b 100644 --- a/inbox/util/blockstore.py +++ b/inbox/util/blockstore.py @@ -142,7 +142,7 @@ def _get_from_s3_bucket(data_sha256, bucket_name): if not key: log.warning(f"No key with name: {data_sha256} returned!") - return + return None return key.get_contents_as_string() @@ -156,13 +156,13 @@ def _get_from_disk(data_sha256): return f.read() except OSError: log.warning(f"No file with name: {data_sha256}!") - return + return None def _delete_from_s3_bucket(data_sha256_hashes, bucket_name): data_sha256_hashes = [hash_ for hash_ in data_sha256_hashes if hash_] if not data_sha256_hashes: - return None + return assert "AWS_ACCESS_KEY_ID" in config, "Need AWS key!" assert "AWS_SECRET_ACCESS_KEY" in config, "Need AWS secret!" 
@@ -180,7 +180,7 @@ def _delete_from_s3_bucket(data_sha256_hashes, bucket_name): def _delete_from_disk(data_sha256): if not data_sha256: - return None + return try: os.remove(_data_file_path(data_sha256)) diff --git a/inbox/util/db.py b/inbox/util/db.py index 9f4e42868..3876143c8 100644 --- a/inbox/util/db.py +++ b/inbox/util/db.py @@ -15,7 +15,6 @@ def drop_everything(engine, keep_tables=None, reset_columns=None): default value in the tables that we're keeping - provided as a dict of table_name: list_of_column_names. """ - keep_tables = keep_tables or [] reset_columns = reset_columns or {} conn = engine.connect() diff --git a/inbox/util/debug.py b/inbox/util/debug.py index c9e9d18ef..bfecae3b3 100644 --- a/inbox/util/debug.py +++ b/inbox/util/debug.py @@ -23,7 +23,8 @@ def wrapper(*args, **kwargs): def attach_pyinstrument_profiler(): """Run the pyinstrument profiler in the background and dump its output to stdout when the process receives SIGTRAP. In general, you probably want to - use the facilities in inbox.util.profiling instead.""" + use the facilities in inbox.util.profiling instead. + """ profiler = Profiler() profiler.start() diff --git a/inbox/util/misc.py b/inbox/util/misc.py index 1adda8392..6a97dc112 100644 --- a/inbox/util/misc.py +++ b/inbox/util/misc.py @@ -145,7 +145,8 @@ def register_backends(base_name, base_path): def cleanup_subject(subject_str): """Clean-up a message subject-line, including whitespace. - For instance, 'Re: Re: Re: Birthday party' becomes 'Birthday party'""" + For instance, 'Re: Re: Re: Birthday party' becomes 'Birthday party' + """ if subject_str is None: return "" # TODO consider expanding to all diff --git a/inbox/util/testutils.py b/inbox/util/testutils.py index f59a646cf..37aff4105 100644 --- a/inbox/util/testutils.py +++ b/inbox/util/testutils.py @@ -42,15 +42,14 @@ def create_test_db(): for name, host, user, password in schemas: cmd = ( - "DROP DATABASE IF EXISTS {name}; " - "CREATE DATABASE IF NOT EXISTS {name} " + f"DROP DATABASE IF EXISTS {name}; " + f"CREATE DATABASE IF NOT EXISTS {name} " "DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE " - "utf8mb4_general_ci".format(name=name) + "utf8mb4_general_ci" ) subprocess.check_call( - "mysql -h {} -u{} -p{} " '-e "{}"'.format(host, user, password, cmd), - shell=True, + f"mysql -h {host} -u{user} -p{password} " f'-e "{cmd}"', shell=True ) @@ -137,7 +136,8 @@ def mock_query(self, domain, record_type): class MockIMAPClient: """A bare-bones stand-in for an IMAPClient instance, used to test sync - logic without requiring a real IMAP account and server.""" + logic without requiring a real IMAP account and server. + """ def __init__(self): self._data = {} diff --git a/inbox/util/threading.py b/inbox/util/threading.py index 4f16a64c9..36ccd7f61 100644 --- a/inbox/util/threading.py +++ b/inbox/util/threading.py @@ -12,8 +12,9 @@ def fetch_corresponding_thread(db_session, namespace_id, message): - """fetch a thread matching the corresponding message. Returns None if - there's no matching thread.""" + """Fetch a thread matching the corresponding message. Returns None if + there's no matching thread. + """ # handle the case where someone is self-sending an email. if not message.from_addr or not message.to_addr: return None diff --git a/inbox/util/url.py b/inbox/util/url.py index 697944434..df6602969 100644 --- a/inbox/util/url.py +++ b/inbox/util/url.py @@ -196,7 +196,6 @@ def matching_subdomains(new_value, old_value): 2. shares the same top-level domain name. 
""" - if new_value is None and old_value is not None: return False diff --git a/inbox/webhooks/microsoft_notifications.py b/inbox/webhooks/microsoft_notifications.py index c517ba815..33075ed5f 100644 --- a/inbox/webhooks/microsoft_notifications.py +++ b/inbox/webhooks/microsoft_notifications.py @@ -60,7 +60,6 @@ def _validate_webhook_payload(*args, **kwargs): have two separate endpoints, one for calendar changes and one for event changes. """ - try: request.json except UnsupportedMediaType: diff --git a/migrations/versions/000_g_msgid_g_thrid_as_integers.py b/migrations/versions/000_g_msgid_g_thrid_as_integers.py index 4f2cf607a..18bca8784 100644 --- a/migrations/versions/000_g_msgid_g_thrid_as_integers.py +++ b/migrations/versions/000_g_msgid_g_thrid_as_integers.py @@ -1,4 +1,4 @@ -""" Store g_msgid and g_thrid as integers, not strings. For more efficiency. +"""Store g_msgid and g_thrid as integers, not strings. For more efficiency. Revision ID: 2605b23e1fe6 Revises: None diff --git a/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py b/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py index eed33afe4..02737e0cb 100644 --- a/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py +++ b/migrations/versions/002_store_g_thrid_as_biginteger_instead_of_.py @@ -1,4 +1,4 @@ -""" Change g_thrid as BigInteger instead of string +"""Change g_thrid as BigInteger instead of string Revision ID: 297aa1e1acc7 Revises: 217431caacc7 diff --git a/migrations/versions/005_import_old_accounts.py b/migrations/versions/005_import_old_accounts.py index f5fcefa30..6556cbd8e 100644 --- a/migrations/versions/005_import_old_accounts.py +++ b/migrations/versions/005_import_old_accounts.py @@ -23,7 +23,7 @@ def upgrade(): from inbox.models.session import session_scope engine = main_engine(pool_size=1, max_overflow=0) - import inbox.auth.gmail as gmail + from inbox.auth import gmail from inbox.models.backends.imap import ImapAccount # Assert we have the dump file @@ -36,7 +36,7 @@ def upgrade(): return # Imports to `imapaccount_old` table - with open(SQL_DUMP_FILENAME, "r") as f: + with open(SQL_DUMP_FILENAME) as f: print("Importing old account data..."), op.execute(f.read()) print("OK!") @@ -51,13 +51,13 @@ class ImapAccount_Old(Base): migrated_accounts = [] for acct in db_session.query(ImapAccount_Old): - print("Importing {0}".format(acct.email_address)) + print(f"Importing {acct.email_address}") existing_account = db_session.query(ImapAccount).filter_by( email_address=acct.email_address ) if existing_account.count() > 0: - print("Already have account for {0}".format(acct.email_address)) + print(f"Already have account for {acct.email_address}") continue # Create a mock OAuth response using data from the old table @@ -86,23 +86,19 @@ class ImapAccount_Old(Base): db_session.commit() migrated_accounts.append(new_account) - print("\nDone! Imported {0} accounts.".format(len(migrated_accounts))) + print(f"\nDone! Imported {len(migrated_accounts)} accounts.") print("\nNow verifying refresh tokens...\n") verified_accounts = [] for acct in migrated_accounts: - print("Verifying {0}... ".format(acct.email_address)), + print(f"Verifying {acct.email_address}... "), if gmail.verify_account(acct): verified_accounts.append(acct) print("OK!") else: print("FAILED!") - print( - "Done! Verified {0} of {1}".format( - len(verified_accounts), len(migrated_accounts) - ) - ) + print(f"Done! 
Verified {len(verified_accounts)} of {len(migrated_accounts)}")
 op.drop_table("imapaccount_old")
diff --git a/migrations/versions/017_haspublicid.py b/migrations/versions/017_haspublicid.py
index 1430124c8..1245d9410 100644
--- a/migrations/versions/017_haspublicid.py
+++ b/migrations/versions/017_haspublicid.py
@@ -52,7 +52,7 @@ def upgrade(): for c in classes: assert issubclass(c, HasPublicID) - print("[{0}] adding public_id column... ".format(c.__tablename__)), + print(f"[{c.__tablename__}] adding public_id column... "), sys.stdout.flush() op.add_column( c.__tablename__, sa.Column("public_id", mysql.BINARY(16), nullable=False) @@ -60,7 +60,7 @@ def upgrade(): print("adding index... "), op.create_index( - "ix_{0}_public_id".format(c.__tablename__), + f"ix_{c.__tablename__}_public_id", c.__tablename__, ["public_id"], unique=False, @@ -75,7 +75,7 @@ def upgrade(): count = 0 for c in classes: garbage_collect() - print("[{0}] Loading rows. ".format(c.__name__)), + print(f"[{c.__name__}] Loading rows. "), sys.stdout.flush() print("Generating public_ids"), sys.stdout.flush() @@ -87,7 +87,7 @@ def upgrade(): sys.stdout.flush() db_session.commit() garbage_collect() - sys.stdout.write(" Saving. ".format(c.__name__)), + sys.stdout.write(" Saving. "), # sys.stdout.flush() sys.stdout.flush() db_session.commit() @@ -125,12 +125,10 @@ def downgrade(): for c in classes: assert issubclass(c, HasPublicID) - print("[{0}] Dropping public_id column... ".format(c.__tablename__)), + print(f"[{c.__tablename__}] Dropping public_id column... "), op.drop_column(c.__tablename__, "public_id") print("Dropping index... "), - op.drop_index( - "ix_{0}_public_id".format(c.__tablename__), table_name=c.__tablename__ - ) + op.drop_index(f"ix_{c.__tablename__}_public_id", table_name=c.__tablename__) print("Done.")
diff --git a/migrations/versions/022_webhooks_and_filters.py b/migrations/versions/022_webhooks_and_filters.py
index b0d653d72..6726a485f 100644
--- a/migrations/versions/022_webhooks_and_filters.py
+++ b/migrations/versions/022_webhooks_and_filters.py
@@ -1,4 +1,4 @@ -""" Rename WebhookParameters -> Webhook +"""Rename WebhookParameters -> Webhook Note that this migration deletes old webhook data. This is OK because we haven't stored any webhooks yet. 
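The migration-017 hunks above repeat a single per-table pattern: add a fixed-width `BINARY(16)` `public_id` column, then create a non-unique index over it. A condensed sketch of that pattern (hypothetical helper; uses the generic `sa.BINARY` type rather than the MySQL dialect type the migration imports):

```python
import sqlalchemy as sa
from alembic import op


def add_public_id(table_name: str) -> None:
    # Fixed-width binary column holding the 128-bit public id.
    op.add_column(
        table_name, sa.Column("public_id", sa.BINARY(16), nullable=False)
    )
    # Non-unique index so lookups by public_id stay cheap.
    op.create_index(
        f"ix_{table_name}_public_id", table_name, ["public_id"], unique=False
    )
```

The downgrade path in the migration is the mirror image: drop the index, then the column.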
diff --git a/migrations/versions/038_add_public_ids_to_transactions.py b/migrations/versions/038_add_public_ids_to_transactions.py
index b57856d65..79ee9a523 100644
--- a/migrations/versions/038_add_public_ids_to_transactions.py
+++ b/migrations/versions/038_add_public_ids_to_transactions.py
@@ -47,7 +47,7 @@ class Transaction(Base): with session_scope(versioned=False) as db_session: count = 0 (num_transactions,) = db_session.query(sa.func.max(Transaction.id)).one() - print("Adding public ids to {} transactions".format(num_transactions)) + print(f"Adding public ids to {num_transactions} transactions") for pointer in range(0, num_transactions + 1, 500): for entry in db_session.query(Transaction).filter( Transaction.id >= pointer, Transaction.id < pointer + 500
diff --git a/migrations/versions/045_new_password_storage.py b/migrations/versions/045_new_password_storage.py
index 344bbe73f..119f79079 100644
--- a/migrations/versions/045_new_password_storage.py
+++ b/migrations/versions/045_new_password_storage.py
@@ -73,12 +73,12 @@ def _keyfile(self, create_dir=True): assert KEY_DIR if create_dir: mkdirp(KEY_DIR) - key_filename = "{0}".format(sha256(self.key).hexdigest()) + key_filename = sha256(self.key).hexdigest() return os.path.join(KEY_DIR, key_filename) def get_old_password(self): if self.password_aes is not None: - with open(self._keyfile, "r") as f: + with open(self._keyfile) as f: key = f.read() key = self.key + key
diff --git a/migrations/versions/056_message_unique_constraint.py b/migrations/versions/056_message_unique_constraint.py
index 719cdaffd..d2b00c9a8 100644
--- a/migrations/versions/056_message_unique_constraint.py
+++ b/migrations/versions/056_message_unique_constraint.py
@@ -1,4 +1,4 @@ -""" Remove duplicated Gmail Message objects and tighten constraints for Gmail messages. +"""Remove duplicated Gmail Message objects and tighten constraints for Gmail messages. 
Revision ID: 4b4c5579c083 Revises: 1925c535a52d @@ -65,9 +65,7 @@ def upgrade(): for message_id, thread_id, g_msgid in groups: print( - "deleting duplicates of ({}, {}), saving {}".format( - thread_id, g_msgid, message_id - ) + f"deleting duplicates of ({thread_id}, {g_msgid}), saving {message_id}" ) db_session.query(Message).filter( Message.thread_id == thread_id, diff --git a/migrations/versions/089_revert_encryption.py b/migrations/versions/089_revert_encryption.py index 3ad843533..ddb051791 100644 --- a/migrations/versions/089_revert_encryption.py +++ b/migrations/versions/089_revert_encryption.py @@ -1,4 +1,4 @@ -""" Revert encryption +"""Revert encryption Revision ID: 2c577a8a01b7 Revises: 2b89164aa9cd diff --git a/migrations/versions/143_add_reply_to_message_id.py b/migrations/versions/143_add_reply_to_message_id.py index 9c4e69370..add217cd4 100644 --- a/migrations/versions/143_add_reply_to_message_id.py +++ b/migrations/versions/143_add_reply_to_message_id.py @@ -25,7 +25,7 @@ def upgrade(): WHERE table_name='message' AND referenced_table_name='message' AND constraint_schema=DATABASE()""" ).fetchone()[0] - conn.execute("ALTER TABLE message DROP FOREIGN KEY {}".format(constraint_name)) + conn.execute(f"ALTER TABLE message DROP FOREIGN KEY {constraint_name}") conn.execute( "ALTER TABLE message CHANGE resolved_message_id reply_to_message_id INT(11)" ) @@ -43,7 +43,7 @@ def downgrade(): WHERE table_name='message' AND referenced_table_name='message' AND constraint_schema=DATABASE()""" ).fetchone()[0] - conn.execute("ALTER TABLE message DROP FOREIGN KEY {}".format(constraint_name)) + conn.execute(f"ALTER TABLE message DROP FOREIGN KEY {constraint_name}") conn.execute("ALTER TABLE message DROP FOREIGN KEY message_ibfk_3") conn.execute( "ALTER TABLE message CHANGE reply_to_message_id resolved_message_id INT(11)" diff --git a/migrations/versions/150_add_polymorphic_events.py b/migrations/versions/150_add_polymorphic_events.py index 5676c4a7d..471bf3689 100644 --- a/migrations/versions/150_add_polymorphic_events.py +++ b/migrations/versions/150_add_polymorphic_events.py @@ -79,7 +79,7 @@ def populate(): try: raw_data = ast.literal_eval(e.raw_data) except: - print("Could not load raw data for event {}".format(e.id)) + print(f"Could not load raw data for event {e.id}") continue e.recurrence = raw_data["recurrence"] db.commit() @@ -98,7 +98,7 @@ def populate(): try: db.execute(create) except Exception as e: - print("Couldn't insert RecurringEventOverrides: {}".format(e)) + print(f"Couldn't insert RecurringEventOverrides: {e}") exit(2) print("done.") @@ -113,7 +113,7 @@ def populate(): try: raw_data = ast.literal_eval(e.raw_data) except: - print("Could not load raw data for event {}".format(e.id)) + print(f"Could not load raw data for event {e.id}") continue rec_uid = raw_data.get("recurringEventId") if rec_uid: @@ -133,7 +133,7 @@ def populate(): print(".", end=" ") sys.stdout.flush() db.commit() - print("done. ({} modified)".format(c)) + print(f"done. 
({c} modified)") # Convert Event to RecurringEvent print("Updating types for RecurringEvent...", end=" ") @@ -148,7 +148,7 @@ def populate(): try: db.execute(create) except Exception as e: - print("Couldn't insert RecurringEvents: {}".format(e)) + print(f"Couldn't insert RecurringEvents: {e}") exit(2) print("done.") @@ -164,7 +164,7 @@ def populate(): try: raw_data = ast.literal_eval(r.raw_data) except: - print("Could not load raw data for event {}".format(r.id)) + print(f"Could not load raw data for event {r.id}") continue r.start_timezone = raw_data["start"].get("timeZone") # find any un-found overrides that didn't have masters earlier @@ -175,7 +175,7 @@ def populate(): print(".", end=" ") sys.stdout.flush() db.commit() - print("done. ({} modified)".format(c)) + print(f"done. ({c} modified)") # Finally, convert all remaining Events to type='event' convert = """UPDATE event SET type='event' WHERE type IS NULL""" diff --git a/migrations/versions/157_update_eas_schema.py b/migrations/versions/157_update_eas_schema.py index 339a5961b..09a4d20f4 100644 --- a/migrations/versions/157_update_eas_schema.py +++ b/migrations/versions/157_update_eas_schema.py @@ -31,9 +31,7 @@ def upgrade(): AND constraint_schema=DATABASE()""" ).fetchall() for (folder_fk,) in folder_fks: - conn.execute( - "ALTER TABLE easfoldersyncstatus DROP FOREIGN KEY {}".format(folder_fk) - ) + conn.execute(f"ALTER TABLE easfoldersyncstatus DROP FOREIGN KEY {folder_fk}") # Drop folder_id foreign key constraint from easuid table folder_fks = conn.execute( @@ -43,7 +41,7 @@ def upgrade(): AND constraint_schema=DATABASE()""" ).fetchall() for (folder_fk,) in folder_fks: - conn.execute("ALTER TABLE easuid DROP FOREIGN KEY {}".format(folder_fk)) + conn.execute(f"ALTER TABLE easuid DROP FOREIGN KEY {folder_fk}") # Add new index on easuid table conn.execute( diff --git a/migrations/versions/160_split_actionlog.py b/migrations/versions/160_split_actionlog.py index 8ce0f4413..5247cdf34 100644 --- a/migrations/versions/160_split_actionlog.py +++ b/migrations/versions/160_split_actionlog.py @@ -36,7 +36,7 @@ def upgrade(): .options(contains_eager(ActionLog.namespace, Namespace.account)) ) - print("Updating {} action_log entries".format(q.count())) + print(f"Updating {q.count()} action_log entries") for a in q.all(): a.type = "actionlog" diff --git a/migrations/versions/164_add_decode_error_index.py b/migrations/versions/164_add_decode_error_index.py index 86707ad47..a340d35cd 100644 --- a/migrations/versions/164_add_decode_error_index.py +++ b/migrations/versions/164_add_decode_error_index.py @@ -1,4 +1,4 @@ -"""add message.decode_error index """ +"""add message.decode_error index""" revision = "17dcbd7754e0" down_revision = "457164360472" diff --git a/migrations/versions/171_update_easuid_schema_3.py b/migrations/versions/171_update_easuid_schema_3.py index a968cb231..a43173696 100644 --- a/migrations/versions/171_update_easuid_schema_3.py +++ b/migrations/versions/171_update_easuid_schema_3.py @@ -36,7 +36,6 @@ class EASUid(Base): {"server_id": sa.func.concat(EASUid.fld_uid, ":", EASUid.msg_uid)}, synchronize_session=False, ) - pass def downgrade(): diff --git a/migrations/versions/175_fix_recurring_override_cascade.py b/migrations/versions/175_fix_recurring_override_cascade.py index 80db66d0a..0f7bda8b2 100644 --- a/migrations/versions/175_fix_recurring_override_cascade.py +++ b/migrations/versions/175_fix_recurring_override_cascade.py @@ -28,7 +28,7 @@ def upgrade(): ).fetchone() if fk_delete == "CASCADE": - print("Checked fk: {}. 
This migration is not needed, skipping.".format(fk_name)) + print(f"Checked fk: {fk_name}. This migration is not needed, skipping.") return conn.execute(text("set @@lock_wait_timeout = 20;")) diff --git a/migrations/versions/180_migrate_event_descriptions.py b/migrations/versions/180_migrate_event_descriptions.py index 78ad9e294..0c1832e21 100644 --- a/migrations/versions/180_migrate_event_descriptions.py +++ b/migrations/versions/180_migrate_event_descriptions.py @@ -21,7 +21,7 @@ def upgrade(): "UPDATE event SET _description=description " "WHERE _description IS NULL AND description IS NOT NULL LIMIT 100000" ) - print("Updated {} rows".format(res.rowcount)) + print(f"Updated {res.rowcount} rows") if res.rowcount == 0: return diff --git a/migrations/versions/213_add_metadata_table.py b/migrations/versions/213_add_metadata_table.py index a3e7e2022..02987cd5e 100644 --- a/migrations/versions/213_add_metadata_table.py +++ b/migrations/versions/213_add_metadata_table.py @@ -69,7 +69,7 @@ def upgrade(): conn = op.get_bind() increment = (shard_id << 48) + 1 - conn.execute("ALTER TABLE metadata AUTO_INCREMENT={}".format(increment)) + conn.execute(f"ALTER TABLE metadata AUTO_INCREMENT={increment}") def downgrade(): diff --git a/migrations/versions/214_introduce_accounttransaction.py b/migrations/versions/214_introduce_accounttransaction.py index 1268c0868..77daa3543 100644 --- a/migrations/versions/214_introduce_accounttransaction.py +++ b/migrations/versions/214_introduce_accounttransaction.py @@ -75,7 +75,7 @@ def upgrade(): conn = op.get_bind() increment = (shard_id << 48) + 1 - conn.execute("ALTER TABLE accounttransaction AUTO_INCREMENT={}".format(increment)) + conn.execute(f"ALTER TABLE accounttransaction AUTO_INCREMENT={increment}") def downgrade(): diff --git a/migrations/versions/228_increase_gmailaccount_token_length.py b/migrations/versions/228_increase_gmailaccount_token_length.py index 88ded41f7..014cf4ddd 100644 --- a/migrations/versions/228_increase_gmailaccount_token_length.py +++ b/migrations/versions/228_increase_gmailaccount_token_length.py @@ -17,10 +17,8 @@ def upgrade(): op.alter_column("gmailaccount", "g_id_token", type_=sa.String(length=2048)) op.alter_column("gmailauthcredentials", "g_id_token", type_=sa.String(length=2048)) - pass def downgrade(): op.alter_column("gmailaccount", "g_id_token", type_=sa.String(length=1024)) op.alter_column("gmailauthcredentials", "g_id_token", type_=sa.String(length=1024)) - pass diff --git a/migrations/versions/239_server_default_created_at.py b/migrations/versions/239_server_default_created_at.py index 00a083af1..7f5c959dc 100644 --- a/migrations/versions/239_server_default_created_at.py +++ b/migrations/versions/239_server_default_created_at.py @@ -50,9 +50,7 @@ def upgrade(): for table in TABLES: conn.execute( text( - "ALTER TABLE `{}` MODIFY COLUMN `created_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP".format( - table - ) + f"ALTER TABLE `{table}` MODIFY COLUMN `created_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP" ) ) @@ -61,9 +59,5 @@ def downgrade(): conn = op.get_bind() for table in TABLES: conn.execute( - text( - "ALTER TABLE `{}` MODIFY COLUMN `created_at` DATETIME NOT NULL".format( - table - ) - ) + text(f"ALTER TABLE `{table}` MODIFY COLUMN `created_at` DATETIME NOT NULL") ) diff --git a/tests/api/base.py b/tests/api/base.py index 69537f270..1638781d0 100644 --- a/tests/api/base.py +++ b/tests/api/base.py @@ -11,7 +11,6 @@ def new_api_client(db, namespace): class TestAPIClient: - """Provide more convenient access to the 
API for testing purposes.""" def __init__(self, test_client, default_namespace_public_id): diff --git a/tests/api/test_drafts.py b/tests/api/test_drafts.py index 0d48e4f13..526bd282a 100644 --- a/tests/api/test_drafts.py +++ b/tests/api/test_drafts.py @@ -1,5 +1,6 @@ """Test local behavior for the drafts API. Doesn't test syncback or actual -sending.""" +sending. +""" import json import os from datetime import datetime @@ -393,7 +394,8 @@ def test_update_to_nonexistent_draft(api_client): def test_contacts_updated(api_client): """Tests that draft-contact associations are properly created and - updated.""" + updated. + """ draft = {"to": [{"email": "alice@example.com"}, {"email": "bob@example.com"}]} r = api_client.post_data("/drafts", draft) diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index ba767fa92..ab0550599 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -285,7 +285,8 @@ def test_strict_argument_parsing(api_client): def test_distinct_results(api_client, db, default_namespace): """Test that limit and offset parameters work correctly when joining on - multiple matching messages per thread.""" + multiple matching messages per thread. + """ # Create a thread with multiple messages on it. first_thread = add_fake_thread(db.session, default_namespace.id) add_fake_message( diff --git a/tests/api/test_messages.py b/tests/api/test_messages.py index be08c5977..08d833b53 100644 --- a/tests/api/test_messages.py +++ b/tests/api/test_messages.py @@ -303,8 +303,8 @@ def test_message_label_updates( db, api_client, default_account, api_version, custom_label ): """Check that you can update a message (optimistically or not), - and that the update is queued in the ActionLog.""" - + and that the update is queued in the ActionLog. + """ headers = dict() headers["Api-Version"] = api_version diff --git a/tests/api/test_sending.py b/tests/api/test_sending.py index 5cbef66eb..7a8a08560 100644 --- a/tests/api/test_sending.py +++ b/tests/api/test_sending.py @@ -993,9 +993,7 @@ def test_inline_html_image_send(patch_smtp, api_client, uploaded_file_ids): "/send", { "subject": "Inline image test", - "body": '
'.format( - file_id - ), + "body": f'
', "file_ids": [file_id], "to": [{"name": "Foo Bar", "email": "foobar@nylas.com"}], }, diff --git a/tests/api/test_threads.py b/tests/api/test_threads.py index 76b8beb61..e8dec0820 100644 --- a/tests/api/test_threads.py +++ b/tests/api/test_threads.py @@ -181,8 +181,8 @@ def test_thread_label_updates( db, api_client, default_account, api_version, custom_label ): """Check that you can update a message (optimistically or not), - and that the update is queued in the ActionLog.""" - + and that the update is queued in the ActionLog. + """ headers = dict() headers["Api-Version"] = api_version diff --git a/tests/api/test_views.py b/tests/api/test_views.py index 8cac6d087..d5fa49029 100644 --- a/tests/api/test_views.py +++ b/tests/api/test_views.py @@ -34,7 +34,8 @@ def test_resource_views( ): """Exercises various tests for views, mostly related to filtering. Note: this only tests views, it assumes the - resources are working as expected.""" + resources are working as expected. + """ # Folders don't work with GMail accounts, need generic IMAP if resource_name == "folders": api_client = new_api_client(db, generic_account.namespace) diff --git a/tests/conftest.py b/tests/conftest.py index 6cf3d81bf..c10df74e2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,4 @@ -""" Fixtures don't go here; see util/base.py and friends. """ +"""Fixtures don't go here; see util/base.py and friends.""" # Monkeypatch first, to prevent "AttributeError: 'module' object has no # attribute 'poll'" errors when tests import socket, then monkeypatch. from gevent import monkey diff --git a/tests/contacts/test_process_mail.py b/tests/contacts/test_process_mail.py index 6b4379072..40b40d521 100644 --- a/tests/contacts/test_process_mail.py +++ b/tests/contacts/test_process_mail.py @@ -1,5 +1,6 @@ """Sanity-check our logic for updating contact data from message addressees -during a sync.""" +during a sync. +""" from inbox.models import Contact from tests.util.base import add_fake_message diff --git a/tests/contacts/test_remote_sync.py b/tests/contacts/test_remote_sync.py index 262638fbc..5ecfb0dce 100644 --- a/tests/contacts/test_remote_sync.py +++ b/tests/contacts/test_remote_sync.py @@ -91,7 +91,8 @@ def test_deletes(contacts_provider, contact_sync, db): ) def test_auth_error_handling(contact_sync, default_account, db): """Test that the contact sync greenlet stops if account credentials are - invalid.""" + invalid. + """ # Give the default test account patently invalid OAuth credentials. default_account.refresh_token = "foo" db.session.commit() diff --git a/tests/general/test_concurrency.py b/tests/general/test_concurrency.py index bd66353fc..c60379f77 100644 --- a/tests/general/test_concurrency.py +++ b/tests/general/test_concurrency.py @@ -31,7 +31,6 @@ def __call__(self): time.sleep(self.delay) if self.call_count < self.max_executions: raise self.exc_type - return @pytest.mark.usefixtures("mock_gevent_sleep") diff --git a/tests/general/test_html_parsing.py b/tests/general/test_html_parsing.py index 542d63a33..282c6bf5d 100644 --- a/tests/general/test_html_parsing.py +++ b/tests/general/test_html_parsing.py @@ -25,7 +25,8 @@ def test_strip_tags(): def test_preserve_refs(): """Test that HTML character/entity references are preserved when we strip - tags.""" + tags. 
+ """ text = "la philologie mène au pire" assert strip_tags(text) == "la philologie mène au pire" diff --git a/tests/general/test_mutable_json_type.py b/tests/general/test_mutable_json_type.py index bf6a4a4f2..c69d1ceb5 100644 --- a/tests/general/test_mutable_json_type.py +++ b/tests/general/test_mutable_json_type.py @@ -1,4 +1,4 @@ -""" Tests for our mutable JSON column type. """ +"""Tests for our mutable JSON column type.""" from datetime import datetime diff --git a/tests/general/test_relationships.py b/tests/general/test_relationships.py index b961da86b..2cd115a58 100644 --- a/tests/general/test_relationships.py +++ b/tests/general/test_relationships.py @@ -8,8 +8,8 @@ def test_category_delete(db, gmail_account): """Ensure that all associated MessageCategories are deleted - when a Category is deleted""" - + when a Category is deleted + """ api_client = new_api_client(db, gmail_account.namespace) po_data = api_client.post_data("/labels/", {"display_name": "Test_Label"}) assert po_data.status_code == 200 @@ -51,8 +51,8 @@ def test_category_delete(db, gmail_account): def test_message_delete(db, gmail_account): """Ensure that all associated MessageCategories are deleted - when a Message is deleted""" - + when a Message is deleted + """ api_client = new_api_client(db, gmail_account.namespace) generic_thread = add_fake_thread(db.session, gmail_account.namespace.id) @@ -91,8 +91,8 @@ def test_message_delete(db, gmail_account): def test_thread_delete(db, gmail_account): """Ensure that all associated Messages are deleted - when a Thread is deleted.""" - + when a Thread is deleted. + """ generic_thread = add_fake_thread(db.session, gmail_account.namespace.id) generic_message = add_fake_message( db.session, gmail_account.namespace.id, generic_thread diff --git a/tests/general/test_required_folders.py b/tests/general/test_required_folders.py index 650b89b6d..069083c02 100644 --- a/tests/general/test_required_folders.py +++ b/tests/general/test_required_folders.py @@ -1,4 +1,4 @@ -""" Test that the All Mail folder is enabled for Gmail. """ +"""Test that the All Mail folder is enabled for Gmail.""" import pytest from inbox.auth.google import GoogleAuthHandler diff --git a/tests/general/test_thread_creation.py b/tests/general/test_thread_creation.py index a9559ad7f..916d7475f 100644 --- a/tests/general/test_thread_creation.py +++ b/tests/general/test_thread_creation.py @@ -57,7 +57,8 @@ def test_generic_grouping(db, default_account): def test_threading_limit(db, folder_sync_engine, monkeypatch): """Test that custom threading doesn't produce arbitrarily long threads, - which eventually break things.""" + which eventually break things. + """ from inbox.models import Message, Thread # Shorten bound to make test faster diff --git a/tests/imap/data.py b/tests/imap/data.py index 7981d01aa..b1af22547 100644 --- a/tests/imap/data.py +++ b/tests/imap/data.py @@ -2,7 +2,8 @@ well as a mock IMAPClient isntance that can be used to deterministically test aspects of IMAP sync. See https://hypothesis.readthedocs.org/en/latest/data.html for more information -about how this works.""" +about how this works. 
+""" import string import flanker diff --git a/tests/imap/test_actions.py b/tests/imap/test_actions.py index 5adf458a9..64a3f1627 100644 --- a/tests/imap/test_actions.py +++ b/tests/imap/test_actions.py @@ -83,9 +83,7 @@ def test_draft_updates(db, default_account, mock_imapclient): assert len(all_uids) == 1 data = conn.uids(all_uids)[0] parsed = mime.from_string(data.body) - expected_message_id = "<{}-{}@mailer.nylas.com>".format( - draft.public_id, draft.version - ) + expected_message_id = f"<{draft.public_id}-{draft.version}@mailer.nylas.com>" assert parsed.headers.get("Message-Id") == expected_message_id # We're testing the draft deletion with Gmail here. However, diff --git a/tests/imap/test_crispin_client.py b/tests/imap/test_crispin_client.py index d2cf38a21..e88f5a41c 100644 --- a/tests/imap/test_crispin_client.py +++ b/tests/imap/test_crispin_client.py @@ -640,7 +640,6 @@ def test_gmail_folders_no_flags(monkeypatch): """ Tests that system folders (trash, inbox, sent) without flags can be labeled """ - folders = [ ((b"\\HasNoChildren",), b"/", "INBOX"), ((b"\\Noselect", b"\\HasChildren"), b"/", "[Gmail]"), @@ -679,7 +678,6 @@ def test_gmail_many_folders_one_role(monkeypatch, constants): i.e accounts with [Imap]/Trash, Trash, and [Gmail]/Trash should only have one folder with the role trash """ - # some duplitace folders where one has been flagged, # and neither have been flagged # in both cases, only one should come out flagged. diff --git a/tests/imap/test_delete_handling.py b/tests/imap/test_delete_handling.py index 67716baa7..d77c29043 100644 --- a/tests/imap/test_delete_handling.py +++ b/tests/imap/test_delete_handling.py @@ -62,7 +62,8 @@ def test_deleting_from_a_message_with_multiple_uids( db, default_account, message, thread ): """Check that deleting a imapuid from a message with - multiple uids doesn't mark the message for deletion.""" + multiple uids doesn't mark the message for deletion. 
+ """ inbox_folder = Folder.find_or_create(db.session, default_account, "inbox", "inbox") sent_folder = Folder.find_or_create(db.session, default_account, "sent", "sent") diff --git a/tests/scheduling/test_syncback_logic.py b/tests/scheduling/test_syncback_logic.py index a3741a31e..a39bd057a 100644 --- a/tests/scheduling/test_syncback_logic.py +++ b/tests/scheduling/test_syncback_logic.py @@ -21,7 +21,7 @@ def purge_accounts_and_actions(): @pytest.fixture def patched_enginemanager(monkeypatch): - engines = {k: None for k in range(0, 6)} + engines = {k: None for k in range(6)} monkeypatch.setattr("inbox.ignition.engine_manager.engines", engines) yield monkeypatch.undo() diff --git a/tests/transactions/test_transaction_deletion.py b/tests/transactions/test_transaction_deletion.py index b7008219e..c92ac9a4e 100644 --- a/tests/transactions/test_transaction_deletion.py +++ b/tests/transactions/test_transaction_deletion.py @@ -57,7 +57,6 @@ def transactions(self, clear_redis, now, db, default_namespace): Yields the newest transaction """ - # Transactions created less than 30 days ago should not be deleted t0 = create_transaction(db, now, default_namespace.id) create_transaction(db, now - timedelta(days=29), default_namespace.id) @@ -88,8 +87,8 @@ def test_transaction_deletion_30_days(self, now, db, default_namespace): all_transactions = db.session.execute(query).scalar() date_query = ( "SELECT count(id) FROM transaction WHERE created_at < " - "DATE_SUB({}, INTERVAL 30 day)" - ).format(format_datetime(now)) + f"DATE_SUB({format_datetime(now)}, INTERVAL 30 day)" + ) older_than_thirty_days = db.session.execute(date_query).scalar() # Delete all transactions older than 30 days @@ -110,8 +109,8 @@ def test_transaction_deletion_one_day( date_query = ( "SELECT count(id) FROM transaction WHERE created_at < " - "DATE_SUB({}, INTERVAL 1 day)" - ).format(format_datetime(now)) + f"DATE_SUB({format_datetime(now)}, INTERVAL 1 day)" + ) older_than_one_day = db.session.execute(date_query).scalar() # Delete all transactions older than 1 day purge_transactions(shard_id, days_ago=1, dry_run=False, now=now) diff --git a/tests/util/base.py b/tests/util/base.py index 8da8d2edd..5bde6a260 100644 --- a/tests/util/base.py +++ b/tests/util/base.py @@ -152,7 +152,7 @@ def default_account(db, config, redis_mock): @fixture(scope="function") def default_namespace(db, default_account): - yield default_account.namespace + return default_account.namespace @fixture @@ -165,7 +165,7 @@ def outlook_account(db, config, redis_mock): @fixture def outlook_namespace(db, outlook_account): - yield outlook_account.namespace + return outlook_account.namespace @fixture(scope="function") @@ -178,12 +178,12 @@ def default_accounts(db, config, redis_mock): @fixture(scope="function") def default_namespaces(db, default_accounts): - yield [account.namespace for account in default_accounts] + return [account.namespace for account in default_accounts] @fixture(scope="function") def generic_account(db): - yield add_generic_imap_account(db.session) + return add_generic_imap_account(db.session) @fixture(scope="function")