diff --git a/scripts/get_learners_to_retire.py b/scripts/get_learners_to_retire.py
deleted file mode 120000
index 58b8869f..00000000
--- a/scripts/get_learners_to_retire.py
+++ /dev/null
@@ -1 +0,0 @@
-../tubular/scripts/get_learners_to_retire.py
\ No newline at end of file
diff --git a/scripts/replace_usernames.py b/scripts/replace_usernames.py
deleted file mode 120000
index 623b813b..00000000
--- a/scripts/replace_usernames.py
+++ /dev/null
@@ -1 +0,0 @@
-../tubular/scripts/replace_usernames.py
\ No newline at end of file
diff --git a/scripts/retire_one_learner.py b/scripts/retire_one_learner.py
deleted file mode 120000
index 531d7c32..00000000
--- a/scripts/retire_one_learner.py
+++ /dev/null
@@ -1 +0,0 @@
-../tubular/scripts/retire_one_learner.py
\ No newline at end of file
diff --git a/scripts/retirement_archive_and_cleanup.py b/scripts/retirement_archive_and_cleanup.py
deleted file mode 120000
index afa4d282..00000000
--- a/scripts/retirement_archive_and_cleanup.py
+++ /dev/null
@@ -1 +0,0 @@
-../tubular/scripts/retirement_archive_and_cleanup.py
\ No newline at end of file
diff --git a/scripts/retirement_bulk_status_update.py b/scripts/retirement_bulk_status_update.py
deleted file mode 120000
index 7e6d844c..00000000
--- a/scripts/retirement_bulk_status_update.py
+++ /dev/null
@@ -1 +0,0 @@
-../tubular/scripts/retirement_bulk_status_update.py
\ No newline at end of file
diff --git a/scripts/retirement_partner_report.py b/scripts/retirement_partner_report.py
deleted file mode 120000
index c1a01bbe..00000000
--- a/scripts/retirement_partner_report.py
+++ /dev/null
@@ -1 +0,0 @@
-../tubular/scripts/retirement_partner_report.py
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index ebec2525..3d3602d9 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -39,7 +39,6 @@ console_scripts =
     frontend_deploy.py = tubular.scripts.frontend_deploy:frontend_deploy
     frontend_multi_build.py = tubular.scripts.frontend_multi_build:frontend_build
     frontend_multi_deploy.py = tubular.scripts.frontend_multi_deploy:frontend_deploy
-    get_learners_to_retire.py = tubular.scripts.get_learners_to_retire:get_learners_to_retire
     get_ready_to_merge_prs.py = tubular.scripts.get_ready_to_merge_prs:get_ready_to_merge_prs
     jenkins_trigger_build.py = tubular.scripts.jenkins_trigger_build:trigger
     merge-approved-prs = tubular.scripts.merge_approved_prs:octomerge
@@ -50,14 +49,9 @@ console_scripts =
     push_public_to_private.py = tubular.scripts.push_public_to_private:push_public_to_private
     purge_cloudflare_cache.py = tubular.scripts.purge_cloudflare_cache:purge_cloudflare_cache
     restrict_to_stage.py = tubular.scripts.restrict_to_stage:restrict_ami_to_stage
-    retire_one_learner.py = tubular.scripts.retire_one_learner:retire_learner
-    retirement_archive_and_cleanup.py = tubular.scripts.retirement_archive_and_cleanup:archive_and_cleanup
-    retirement_bulk_status_update.py = tubular.scripts.retirement_bulk_status_update:update_statuses
-    retirement_partner_report.py = tubular.scripts.retirement_partner_report:generate_report
     retrieve_latest_base_ami.py = tubular.scripts.retrieve_latest_base_ami:retrieve_latest_base_ami
     retrieve_base_ami.py = tubular.scripts.retrieve_base_ami:retrieve_base_ami
     rollback_asg.py = tubular.scripts.rollback_asg:rollback
-    structures.py = tubular.scripts.structures:cli
     submit_slack_msg.py = tubular.scripts.submit_slack_msg:submit_slack_msg
 
 [extras]
diff --git a/tubular/amplitude_api.py b/tubular/amplitude_api.py
deleted file mode 100644
index 5b31fb1d..00000000
--- a/tubular/amplitude_api.py
+++ /dev/null
@@ -1,92 +0,0 @@ -""" -Amplitude API class that is used to delete user from Amplitude. -""" -import logging -import requests -import json -import backoff -import os - -logger = logging.getLogger(__name__) -MAX_ATTEMPTS = int(os.environ.get("RETRY_MAX_ATTEMPTS", 5)) - - -class AmplitudeException(Exception): - """ - AmplitudeException will be raised there is fatal error and is not recoverable. - """ - pass - - -class AmplitudeRecoverableException(AmplitudeException): - """ - AmplitudeRecoverableException will be raised when request can be retryable. - """ - pass - - -class AmplitudeApi: - """ - Amplitude API is used to handle communication with Amplitude Api's. - """ - - def __init__(self, amplitude_api_key, amplitude_secret_key): - self.amplitude_api_key = amplitude_api_key - self.amplitude_secret_key = amplitude_secret_key - self.base_url = "https://amplitude.com/" - self.delete_user_path = "api/2/deletions/users" - - def auth(self): - """ - Returns auth credentials for Amplitude authorization. - - Returns: - Tuple: Returns authorization tuple. - """ - return (self.amplitude_api_key, self.amplitude_secret_key) - - - @backoff.on_exception( - backoff.expo, - AmplitudeRecoverableException, - max_tries = MAX_ATTEMPTS, - ) - def delete_user(self, user): - """ - This function send an API request to delete user from Amplitude. It then parse the response and - try again if it is recoverable. - - Returns: - None - - Args: - user (dict): raw data of user to delete. - - Raises: - AmplitudeException: if the error from amplitude is unrecoverable/unretryable. - AmplitudeRecoverableException: if the error from amplitude is recoverable/retryable. - """ - response = requests.post( - self.base_url + self.delete_user_path, - headers = {"Content-Type": "application/json"}, - json = { - "user_ids": [user["user"]["id"]], - 'ignore_invalid_id': 'true', # When true, the job ignores users that don't exist in the project. - "requester": "user-retirement-pipeline", - }, - auth = self.auth() - ) - - if response.status_code == 200: - logger.info("Amplitude user deletion succeeded") - return - - # We have some sort of error. Parse it, log it, and retry as needed. - error_msg = "Amplitude user deletion failed due to {reason}".format(reason=response.reason) - logger.error(error_msg) - # Status 429 is returned when there are too many requests and can be resolved in retrying sending - # request. - if response.status_code == 429 or 500 <= response.status_code < 600: - raise AmplitudeRecoverableException(error_msg) - else: - raise AmplitudeException(error_msg) diff --git a/tubular/braze_api.py b/tubular/braze_api.py deleted file mode 100644 index 247ceccc..00000000 --- a/tubular/braze_api.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -Helper API classes for calling Braze APIs. 
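For context on what the removal above drops, a minimal pre-removal usage sketch of the deleted AmplitudeApi client follows; the credentials and the learner record are placeholders, not values from this repository.

# Sketch only: drives the AmplitudeApi client that the hunk above deletes.
from tubular.amplitude_api import AmplitudeApi, AmplitudeException

amplitude = AmplitudeApi("AMPLITUDE_API_KEY", "AMPLITUDE_SECRET_KEY")  # placeholder keys

# Same shape as the LMS retirement-queue records used elsewhere in this diff.
learner = {"user": {"id": 5213599}}

try:
    # Recoverable failures (429 / 5xx) are retried via backoff before giving up.
    amplitude.delete_user(learner)
except AmplitudeException as exc:
    print(f"Amplitude deletion failed permanently: {exc}")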
-""" -import logging -import os - -import backoff -import requests - -LOG = logging.getLogger(__name__) -MAX_ATTEMPTS = int(os.environ.get('RETRY_BRAZE_MAX_ATTEMPTS', 5)) - - -class BrazeException(Exception): - pass - - -class BrazeRecoverableException(BrazeException): - pass - - -class BrazeApi: - """ - Braze API client used to make calls to Braze - """ - - def __init__(self, braze_api_key, braze_instance): - self.api_key = braze_api_key - - # https://www.braze.com/docs/api/basics/#endpoints - self.base_url = 'https://rest.{instance}.braze.com'.format(instance=braze_instance) - - def auth_headers(self): - """Returns authorization headers suitable for passing to the requests library""" - return { - 'Authorization': 'Bearer ' + self.api_key, - } - - @staticmethod - def get_error_message(response): - """Returns a string suitable for logging""" - try: - json = response.json() - except ValueError: - json = {} - - # https://www.braze.com/docs/api/errors - message = json.get('message') - - return message or response.reason - - def process_response(self, response, action): - """Log response status and raise an error as needed""" - if response.ok: - LOG.info('Braze {action} succeeded'.format(action=action)) - return - - # We have some sort of error. Parse it, log it, and retry as needed. - error_msg = 'Braze {action} failed due to {msg}'.format(action=action, msg=self.get_error_message(response)) - LOG.error(error_msg) - - if response.status_code == 429 or 500 <= response.status_code < 600: - raise BrazeRecoverableException(error_msg) - else: - raise BrazeException(error_msg) - - @backoff.on_exception( - backoff.expo, - BrazeRecoverableException, - max_tries=MAX_ATTEMPTS, - ) - def delete_user(self, learner): - """ - Delete a learner from Braze. - """ - # https://www.braze.com/docs/help/gdpr_compliance/#the-right-to-erasure - # https://www.braze.com/docs/api/endpoints/user_data/post_user_delete - response = requests.post( - self.base_url + '/users/delete', - headers=self.auth_headers(), - json={ - 'external_ids': [learner['user']['id']], # Braze external ids are LMS user ids - }, - ) - self.process_response(response, 'user deletion') diff --git a/tubular/edx_api.py b/tubular/edx_api.py index 8ff9f612..1ebbaf76 100644 --- a/tubular/edx_api.py +++ b/tubular/edx_api.py @@ -189,73 +189,6 @@ class LmsApi(BaseApiClient): """ LMS API client with convenience methods for making API calls. """ - @_retry_lms_api() - def learners_to_retire(self, states_to_request, cool_off_days=7, limit=None): - """ - Retrieves a list of learners awaiting retirement actions. - """ - params = { - 'cool_off_days': cool_off_days, - 'states': states_to_request - } - if limit: - params['limit'] = limit - api_url = self.get_api_url('api/user/v1/accounts/retirement_queue') - return self._request('GET', api_url, params=params) - - @_retry_lms_api() - def get_learners_by_date_and_status(self, state_to_request, start_date, end_date): - """ - Retrieves a list of learners in the given retirement state that were - created in the retirement queue between the dates given. Date range - is inclusive, so to get one day you would set both dates to that day. - - :param state_to_request: String LMS UserRetirementState state name (ex. 
COMPLETE) - :param start_date: Date or Datetime object - :param end_date: Date or Datetime - """ - params = { - 'start_date': start_date.strftime('%Y-%m-%d'), - 'end_date': end_date.strftime('%Y-%m-%d'), - 'state': state_to_request - } - api_url = self.get_api_url('api/user/v1/accounts/retirements_by_status_and_date') - return self._request('GET', api_url, params=params) - - @_retry_lms_api() - def get_learner_retirement_state(self, username): - """ - Retrieves the given learner's retirement state. - """ - api_url = self.get_api_url(f'api/user/v1/accounts/{username}/retirement_status') - return self._request('GET', api_url) - - @_retry_lms_api() - def update_learner_retirement_state(self, username, new_state_name, message, force=False): - """ - Updates the given learner's retirement state to the retirement state name new_string - with the additional string information in message (for logging purposes). - """ - data = { - 'username': username, - 'new_state': new_state_name, - 'response': message - } - - if force: - data['force'] = True - - api_url = self.get_api_url('api/user/v1/accounts/update_retirement_status') - return self._request('PATCH', api_url, json=data) - - @_retry_lms_api() - def retirement_deactivate_logout(self, learner): - """ - Performs the user deactivation and forced logout step of learner retirement - """ - data = {'username': learner['original_username']} - api_url = self.get_api_url('api/user/v1/accounts/deactivate_logout') - return self._request('POST', api_url, json=data) @_retry_lms_api() def retirement_retire_forum(self, learner): @@ -289,26 +222,6 @@ def retirement_unenroll(self, learner): api_url = self.get_api_url('api/enrollment/v1/unenroll') return self._request('POST', api_url, json=data) - # This endpoint additionally returns 500 when the EdxNotes backend service is unavailable. - @_retry_lms_api() - def retirement_retire_notes(self, learner): - """ - Deletes all the user's notes (aka. annotations) - """ - data = {'username': learner['original_username']} - api_url = self.get_api_url('api/edxnotes/v1/retire_user') - return self._request('POST', api_url, json=data) - - @_retry_lms_api() - def retirement_lms_retire_misc(self, learner): - """ - Deletes, blanks, or one-way hashes personal information in LMS as - defined in EDUCATOR-2802 and sub-tasks. 
- """ - data = {'username': learner['original_username']} - api_url = self.get_api_url('api/user/v1/accounts/retire_misc') - return self._request('POST', api_url, json=data) - @_retry_lms_api() def retirement_lms_retire(self, learner): """ @@ -318,15 +231,6 @@ def retirement_lms_retire(self, learner): api_url = self.get_api_url('api/user/v1/accounts/retire') return self._request('POST', api_url, json=data) - @_retry_lms_api() - def retirement_partner_queue(self, learner): - """ - Calls LMS to add the given user to the retirement reporting queue - """ - data = {'username': learner['original_username']} - api_url = self.get_api_url('api/user/v1/accounts/retirement_partner_report') - return self._request('PUT', api_url, json=data) - @_retry_lms_api() def retirement_partner_report(self): """ @@ -344,56 +248,6 @@ def retirement_partner_cleanup(self, usernames): api_url = self.get_api_url('api/user/v1/accounts/retirement_partner_report_cleanup') return self._request('POST', api_url, json=usernames) - @_retry_lms_api() - def retirement_retire_proctoring_data(self, learner): - """ - Deletes or hashes learner data from edx-proctoring - """ - api_url = self.get_api_url(f"api/edx_proctoring/v1/retire_user/{learner['user']['id']}") - return self._request('POST', api_url) - - @_retry_lms_api() - def retirement_retire_proctoring_backend_data(self, learner): - """ - Removes the given learner from 3rd party proctoring backends - """ - api_url = self.get_api_url(f"api/edx_proctoring/v1/retire_backend_user/{learner['user']['id']}") - return self._request('POST', api_url) - - @_retry_lms_api() - def bulk_cleanup_retirements(self, usernames): - """ - Deletes the retirements for all given usernames - """ - data = {'usernames': usernames} - api_url = self.get_api_url('api/user/v1/accounts/retirement_cleanup') - return self._request('POST', api_url, json=data) - - def replace_lms_usernames(self, username_mappings): - """ - Calls LMS API to replace usernames. - - Param: - username_mappings: list of dicts where key is current username and value is new desired username - [{current_un_1: desired_un_1}, {current_un_2: desired_un_2}] - """ - data = {"username_mappings": username_mappings} - api_url = self.get_api_url('api/user/v1/accounts/replace_usernames') - return self._request('POST', api_url, json=data) - - def replace_forums_usernames(self, username_mappings): - """ - Calls the discussion forums API inside of LMS to replace usernames. - - Param: - username_mappings: list of dicts where key is current username and value is new unique username - [{current_un_1: new_un_1}, {current_un_2: new_un_2}] - """ - data = {"username_mappings": username_mappings} - api_url = self.get_api_url('api/discussion/v1/accounts/replace_usernames') - return self._request('POST', api_url, json=data) - - class EcommerceApi(BaseApiClient): """ Ecommerce API client with convenience methods for making API calls. @@ -407,28 +261,6 @@ def retire_learner(self, learner): api_url = self.get_api_url('api/v2/user/retire') return self._request('POST', api_url, json=data) - @_retry_lms_api() - def get_tracking_key(self, learner): - """ - Fetches the ecommerce tracking id used for Segment tracking when - ecommerce doesn't have access to the LMS user id. - """ - api_url = self.get_api_url(f"api/v2/retirement/tracking_id/{learner['original_username']}") - return self._request('GET', api_url)['ecommerce_tracking_id'] - - def replace_usernames(self, username_mappings): - """ - Calls the ecommerce API to replace usernames. 
- - Param: - username_mappings: list of dicts where key is current username and value is new unique username - [{current_un_1: new_un_1}, {current_un_2: new_un_2}] - """ - data = {"username_mappings": username_mappings} - api_url = self.get_api_url('api/v2/user_management/replace_usernames') - return self._request('POST', api_url, json=data) - - class CredentialsApi(BaseApiClient): """ Credentials API client with convenience methods for making API calls. @@ -442,36 +274,6 @@ def retire_learner(self, learner): api_url = self.get_api_url('user/retire') return self._request('POST', api_url, json=data) - def replace_usernames(self, username_mappings): - """ - Calls the credentials API to replace usernames. - - Param: - username_mappings: list of dicts where key is current username and value is new unique username - [{current_un_1: new_un_1}, {current_un_2: new_un_2}] - """ - data = {"username_mappings": username_mappings} - api_url = self.get_api_url('api/v2/replace_usernames') - return self._request('POST', api_url, json=data) - - -class DiscoveryApi(BaseApiClient): - """ - Discovery API client with convenience methods for making API calls. - """ - - def replace_usernames(self, username_mappings): - """ - Calls the discovery API to replace usernames. - - Param: - username_mappings: list of dicts where key is current username and value is new unique username - [{current_un_1: new_un_1}, {current_un_2: new_un_2}] - """ - data = {"username_mappings": username_mappings} - api_url = self.get_api_url('api/v1/replace_usernames') - return self._request('POST', api_url, json=data) - class DemographicsApi(BaseApiClient): """ diff --git a/tubular/hubspot_api.py b/tubular/hubspot_api.py deleted file mode 100644 index c196a8b9..00000000 --- a/tubular/hubspot_api.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Helper API classes for calling Hubspot APIs. -""" -import os -import logging - -import backoff -import requests - -from tubular.tubular_email import send_email - -LOG = logging.getLogger(__name__) -MAX_ATTEMPTS = int(os.environ.get('RETRY_HUBSPOT_MAX_ATTEMPTS', 5)) - -GET_VID_FROM_EMAIL_URL_TEMPLATE = "https://api.hubapi.com/contacts/v1/contact/email/{email}/profile" -DELETE_USER_FROM_VID_TEMPLATE = "https://api.hubapi.com/contacts/v1/contact/vid/{vid}" - - -class HubspotException(Exception): - pass - - -class HubspotAPI: - """ - Hubspot API client used to make calls to Hubspot - """ - - def __init__( - self, - hubspot_api_key, - aws_region, - from_address, - alert_email - ): - self.api_key = hubspot_api_key - self.aws_region = aws_region - self.from_address = from_address - self.alert_email = alert_email - - @backoff.on_exception( - backoff.expo, - HubspotException, - max_tries=MAX_ATTEMPTS - ) - def delete_user(self, learner): - """ - Delete a learner from hubspot using their email address. 
- """ - email = learner.get('original_email', None) - if not email: - raise TypeError('Expected an email address for user to delete, but received None.') - - user_vid = self.get_user_vid(email) - if user_vid: - self.delete_user_by_vid(user_vid) - - def delete_user_by_vid(self, vid): - """ - Delete a learner from hubspot using their Hubspot `vid` (unique identifier) - """ - headers = { - 'content-type': 'application/json', - 'authorization': f'Bearer {self.api_key}' - } - - req = requests.delete(DELETE_USER_FROM_VID_TEMPLATE.format( - vid=vid - ), headers=headers) - error_msg = "" - if req.status_code == 200: - LOG.info("User successfully deleted from Hubspot") - self.send_marketing_alert(vid) - elif req.status_code == 401: - error_msg = "Hubspot user deletion failed due to authorized API call" - elif req.status_code == 404: - error_msg = "Hubspot user deletion failed because vid doesn't match user" - elif req.status_code == 500: - error_msg = "Hubspot user deletion failed due to server-side (Hubspot) issues" - else: - error_msg = "Hubspot user deletion failed due to unknown reasons" - - if error_msg: - LOG.error(error_msg) - raise HubspotException(error_msg) - - def get_user_vid(self, email): - """ - Get a user's `vid` from Hubspot. `vid` is the terminology that hubspot uses for a user ids - """ - headers = { - 'content-type': 'application/json', - 'authorization': f'Bearer {self.api_key}' - } - - req = requests.get(GET_VID_FROM_EMAIL_URL_TEMPLATE.format( - email=email - ), headers=headers) - if req.status_code == 200: - req_data = req.json() - return req_data.get('vid') - elif req.status_code == 404: - LOG.info("No action taken because no user was found in Hubspot.") - return - else: - error_msg = "Error attempted to get user_vid from Hubspot. Error: {}".format( - req.text - ) - LOG.error(error_msg) - raise HubspotException(error_msg) - - def send_marketing_alert(self, vid): - """ - Notify marketing with user's Hubspot `vid` upon successful deletion. - """ - subject = "Alert: Hubspot Deletion" - body = "Learner with the VID \"{}\" has been deleted from Hubspot.".format(vid) - send_email( - self.aws_region, - self.from_address, - [self.alert_email], - subject, - body - ) diff --git a/tubular/scripts/get_learners_to_retire.py b/tubular/scripts/get_learners_to_retire.py deleted file mode 100755 index ef36008b..00000000 --- a/tubular/scripts/get_learners_to_retire.py +++ /dev/null @@ -1,105 +0,0 @@ -#! /usr/bin/env python3 - -""" -Command-line script to retrieve list of learners that have requested to be retired. -The script calls the appropriate LMS endpoint to get this list of learners. -""" - -from os import path -import io -import sys -import logging -import click -import yaml - -# Add top-level module path to sys.path before importing tubular code. -sys.path.append(path.dirname(path.dirname(path.abspath(__file__)))) - -from tubular.edx_api import LmsApi # pylint: disable=wrong-import-position -from tubular.jenkins import export_learner_job_properties # pylint: disable=wrong-import-position - -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) -LOG = logging.getLogger(__name__) - - -@click.command("get_learners_to_retire") -@click.option( - '--config_file', - help='File in which YAML config exists that overrides all other params.' 
-) -@click.option( - '--cool_off_days', - help='Number of days a learner should be in the retirement queue before being actually retired.', - default=7 -) -@click.option( - '--output_dir', - help="Directory in which to write the Jenkins properties files.", - default='./jenkins_props' -) -@click.option( - '--user_count_error_threshold', - help="If more users than this number are returned we will error out instead of retiring. This is a failsafe" - "against attacks that somehow manage to add users to the retirement queue.", - default=300 -) -@click.option( - '--max_user_batch_size', - help="This setting will only get at most X number of users. If this number is lower than the user_count_error_threshold" - "setting then it will not error.", - default=200 -) -def get_learners_to_retire(config_file, - cool_off_days, - output_dir, - user_count_error_threshold, - max_user_batch_size): - """ - Retrieves a JWT token as the retirement service user, then calls the LMS - endpoint to retrieve the list of learners awaiting retirement. - """ - if not config_file: - click.echo('A config file is required.') - sys.exit(-1) - - with io.open(config_file, 'r') as config: - config_yaml = yaml.safe_load(config) - - user_count_error_threshold = int(user_count_error_threshold) - cool_off_days = int(cool_off_days) - - client_id = config_yaml['client_id'] - client_secret = config_yaml['client_secret'] - lms_base_url = config_yaml['base_urls']['lms'] - retirement_pipeline = config_yaml['retirement_pipeline'] - end_states = [state[1] for state in retirement_pipeline] - states_to_request = ['PENDING'] + end_states - - api = LmsApi(lms_base_url, lms_base_url, client_id, client_secret) - - # Retrieve the learners to retire and export them to separate Jenkins property files. - learners_to_retire = api.learners_to_retire(states_to_request, cool_off_days, max_user_batch_size) - if max_user_batch_size: - learners_to_retire = learners_to_retire[:max_user_batch_size] - learners_to_retire_cnt = len(learners_to_retire) - - if learners_to_retire_cnt > user_count_error_threshold: - click.echo( - 'Too many learners to retire! Expected {} or fewer, got {}!'.format( - user_count_error_threshold, - learners_to_retire_cnt - ) - ) - sys.exit(-1) - - export_learner_job_properties( - learners_to_retire, - output_dir - ) - - -if __name__ == "__main__": - # pylint: disable=unexpected-keyword-arg, no-value-for-parameter - # If using env vars to provide params, prefix them with "RETIREMENT_", e.g. 
RETIREMENT_CLIENT_ID - get_learners_to_retire(auto_envvar_prefix='RETIREMENT') - diff --git a/tubular/scripts/helpers.py b/tubular/scripts/helpers.py index 06e0c4b1..cf720ae1 100644 --- a/tubular/scripts/helpers.py +++ b/tubular/scripts/helpers.py @@ -23,11 +23,6 @@ from tubular.edx_api import CredentialsApi, DemographicsApi, EcommerceApi, LicenseManagerApi, \ LmsApi # pylint: disable=wrong-import-position -from tubular.braze_api import BrazeApi # pylint: disable=wrong-import-position -from tubular.segment_api import SegmentApi # pylint: disable=wrong-import-position -from tubular.salesforce_api import SalesforceApi # pylint: disable=wrong-import-position -from tubular.hubspot_api import HubspotAPI # pylint: disable=wrong-import-position -from tubular.amplitude_api import AmplitudeApi # pylint: disable=wrong-import-position def _log(kind, message): @@ -151,35 +146,15 @@ def _setup_all_apis_or_exit(fail_func, fail_code, config): lms_base_url = config['base_urls']['lms'] ecommerce_base_url = config['base_urls'].get('ecommerce', None) credentials_base_url = config['base_urls'].get('credentials', None) - segment_base_url = config['base_urls'].get('segment', None) demographics_base_url = config['base_urls'].get('demographics', None) license_manager_base_url = config['base_urls'].get('license_manager', None) client_id = config['client_id'] client_secret = config['client_secret'] - braze_api_key = config.get('braze_api_key', None) - braze_instance = config.get('braze_instance', None) - amplitude_api_key = config.get('amplitude_api_key', None) - amplitude_secret_key = config.get('amplitude_secret_key', None) - salesforce_user = config.get('salesforce_user', None) - salesforce_password = config.get('salesforce_password', None) - salesforce_token = config.get('salesforce_token', None) - salesforce_domain = config.get('salesforce_domain', None) - salesforce_assignee = config.get('salesforce_assignee', None) - segment_auth_token = config.get('segment_auth_token', None) - segment_workspace_slug = config.get('segment_workspace_slug', None) - hubspot_api_key = config.get('hubspot_api_key', None) - hubspot_aws_region = config.get('hubspot_aws_region', None) - hubspot_from_address = config.get('hubspot_from_address', None) - hubspot_alert_email = config.get('hubspot_alert_email', None) for state in config['retirement_pipeline']: for service, service_url in ( - ('BRAZE', braze_api_key), - ('AMPLITUDE', amplitude_api_key), ('ECOMMERCE', ecommerce_base_url), ('CREDENTIALS', credentials_base_url), - ('SEGMENT', segment_base_url), - ('HUBSPOT', hubspot_api_key), ('DEMOGRAPHICS', demographics_base_url) ): if state[2] == service and service_url is None: @@ -187,35 +162,6 @@ def _setup_all_apis_or_exit(fail_func, fail_code, config): config['LMS'] = LmsApi(lms_base_url, lms_base_url, client_id, client_secret) - if braze_api_key: - config['BRAZE'] = BrazeApi( - braze_api_key, - braze_instance, - ) - - if amplitude_api_key and amplitude_secret_key: - config['AMPLITUDE'] = AmplitudeApi( - amplitude_api_key, - amplitude_secret_key, - ) - - if salesforce_user and salesforce_password and salesforce_token: - config['SALESFORCE'] = SalesforceApi( - salesforce_user, - salesforce_password, - salesforce_token, - salesforce_domain, - salesforce_assignee - ) - - if hubspot_api_key: - config['HUBSPOT'] = HubspotAPI( - hubspot_api_key, - hubspot_aws_region, - hubspot_from_address, - hubspot_alert_email - ) - if ecommerce_base_url: config['ECOMMERCE'] = EcommerceApi(lms_base_url, ecommerce_base_url, client_id, client_secret) @@ 
-233,11 +179,5 @@ def _setup_all_apis_or_exit(fail_func, fail_code, config): client_secret, ) - if segment_base_url: - config['SEGMENT'] = SegmentApi( - segment_base_url, - segment_auth_token, - segment_workspace_slug - ) except Exception as exc: # pylint: disable=broad-except fail_func(fail_code, 'Unexpected error occurred!', exc) diff --git a/tubular/scripts/replace_usernames.py b/tubular/scripts/replace_usernames.py deleted file mode 100644 index b3913fda..00000000 --- a/tubular/scripts/replace_usernames.py +++ /dev/null @@ -1,147 +0,0 @@ -#! /usr/bin/env python3 - -""" -Command-line script to replace the usernames for all passed in learners. -Accepts a list of current usernames and their preferred new username. This -script will call LMS first which generates a unique username if the passed in -new username is not unique. It then calls all other services to replace the -username in their DBs. - -""" - -from os import path -import csv -import io -import sys -import logging -import click -import yaml - -# Add top-level module path to sys.path before importing tubular code. -sys.path.append(path.dirname(path.dirname(path.abspath(__file__)))) - -from tubular.edx_api import CredentialsApi, DiscoveryApi, EcommerceApi, LmsApi # pylint: disable=wrong-import-position - -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) -LOG = logging.getLogger(__name__) - - -def write_responses(writer, replacements, status): - for replacement in replacements: - original_username = list(replacement.keys())[0] - new_username = list(replacement.values())[0] - writer.writerow([original_username, new_username, status]) - - -@click.command("replace_usernames") -@click.option( - '--config_file', - help='File in which YAML config exists that overrides all other params.' -) -@click.option( - '--username_replacement_csv', - help='File in which YAML config exists that overrides all other params.' -) -def replace_usernames(config_file, username_replacement_csv): - """ - Retrieves a JWT token as the retirement service user, then calls the LMS - endpoint to retrieve the list of learners awaiting retirement. - - Config file example: - ``` - client_id: xxx - client_secret: xxx - base_urls: - lms: http://localhost:18000 - ecommerce: http://localhost:18130 - discovery: http://localhost:18381 - credentials: http://localhost:18150 - ``` - - Username file example: - ``` - current_un_1,desired_un_1 - current_un_2,desired_un_2, - current_un_3,desired_un_3 - ``` - """ - if not config_file: - click.echo('A config file is required.') - sys.exit(-1) - - if not username_replacement_csv: - click.echo('A username replacement CSV file is required') - sys.exit(-1) - - with io.open(config_file, 'r') as config: - config_yaml = yaml.safe_load(config) - - with io.open(username_replacement_csv, 'r') as replacement_file: - csv_reader = csv.reader(replacement_file) - lms_username_mappings = [ - {current_username: desired_username} - for (current_username, desired_username) - in csv_reader - ] - - client_id = config_yaml['client_id'] - client_secret = config_yaml['client_secret'] - lms_base_url = config_yaml['base_urls']['lms'] - ecommerce_base_url = config_yaml['base_urls']['ecommerce'] - discovery_base_url = config_yaml['base_urls']['discovery'] - credentials_base_url = config_yaml['base_urls']['credentials'] - - # Note that though partially_failed sounds better than completely_failed, - # it's actually worse since the user is not consistant across DBs. 
- # Partially failed username replacements will need to be triaged so the - # user isn't in a broken state - successful_replacements = [] - partially_failed_replacements = [] - fully_failed_replacements = [] - - lms_api = LmsApi(lms_base_url, lms_base_url, client_id, client_secret) - ecommerce_api = EcommerceApi(lms_base_url, ecommerce_base_url, client_id, client_secret) - discovery_api = DiscoveryApi(lms_base_url, discovery_base_url, client_id, client_secret) - credentials_api = CredentialsApi(lms_base_url, credentials_base_url, client_id, client_secret) - - # Call LMS with current and desired usernames - response = lms_api.replace_lms_usernames(lms_username_mappings) - fully_failed_replacements += response['failed_replacements'] - in_progress_replacements = response['successful_replacements'] - - # Step through each services endpoints with the list returned from LMS. - # The LMS list has already verified usernames and made any duplicate - # usernames unique (e.g. 'matt' => 'mattf56a'). We pass successful - # replacements onto the next service and store all failed replacments. - replacement_methods = [ - ecommerce_api.replace_usernames, - discovery_api.replace_usernames, - credentials_api.replace_usernames, - lms_api.replace_forums_usernames, - ] - # Iterate through the endpoints above and if the APIs return any failures - # capture these in partially_failed_replacements. Only successfuly - # replacements will continue to be passed to the next service. - for replacement_method in replacement_methods: - response = replacement_method(in_progress_replacements) - partially_failed_replacements += response['failed_replacements'] - in_progress_replacements = response['successful_replacements'] - - successful_replacements = in_progress_replacements - - with open('username_replacement_results.csv', 'w', newline='') as output_file: - csv_writer = csv.writer(output_file) - # Write header - csv_writer.writerow(['Original Username', 'New Username', 'Status']) - write_responses(csv_writer, successful_replacements, "SUCCESS") - write_responses(csv_writer, partially_failed_replacements, "PARTIALLY FAILED") - write_responses(csv_writer, fully_failed_replacements, "FAILED") - - if partially_failed_replacements or fully_failed_replacements: - sys.exit(-1) - - -if __name__ == "__main__": - # pylint: disable=unexpected-keyword-arg, no-value-for-parameter - # If using env vars to provide params, prefix them with "RETIREMENT_", e.g. RETIREMENT_CLIENT_ID - replace_usernames(auto_envvar_prefix='USERNAME_REPLACEMENT') diff --git a/tubular/scripts/retire_one_learner.py b/tubular/scripts/retire_one_learner.py deleted file mode 100755 index 45232c4d..00000000 --- a/tubular/scripts/retire_one_learner.py +++ /dev/null @@ -1,224 +0,0 @@ -#! 
/usr/bin/env python3 -""" -Command-line script to drive the user retirement workflow for a single user - -To run this script you will need a username to run against and a YAML config file in the format: - -client_id: -client_secret: -base_urls: - lms: http://localhost:18000/ - ecommerce: http://localhost:18130/ - credentials: http://localhost:18150/ - demographics: http://localhost:18360/ -retirement_pipeline: - - ['RETIRING_CREDENTIALS', 'CREDENTIALS_COMPLETE', 'CREDENTIALS', 'retire_learner'] - - ['RETIRING_ECOM', 'ECOM_COMPLETE', 'ECOMMERCE', 'retire_learner'] - - ['RETIRING_DEMOGRAPHICS', 'DEMOGRAPHICS_COMPLETE', 'DEMOGRAPHICS', 'retire_learner'] - - ['RETIRING_LICENSE_MANAGER', 'LICENSE_MANAGER_COMPLETE', 'LICENSE_MANAGER', 'retire_learner'] - - ['RETIRING_FORUMS', 'FORUMS_COMPLETE', 'LMS', 'retirement_retire_forum'] - - ['RETIRING_EMAIL_LISTS', 'EMAIL_LISTS_COMPLETE', 'LMS', 'retirement_retire_mailings'] - - ['RETIRING_ENROLLMENTS', 'ENROLLMENTS_COMPLETE', 'LMS', 'retirement_unenroll'] - - ['RETIRING_LMS', 'LMS_COMPLETE', 'LMS', 'retirement_lms_retire'] -""" - -import logging -import sys -from functools import partial -from os import path -from time import time - -import click - -# Add top-level module path to sys.path before importing tubular code. -sys.path.append(path.dirname(path.dirname(path.abspath(__file__)))) - -from tubular.exception import HttpDoesNotExistException -# pylint: disable=wrong-import-position -from tubular.scripts.helpers import ( - _config_or_exit, - _fail, - _fail_exception, - _get_error_str_from_exception, - _log, - _setup_all_apis_or_exit -) - -# Return codes for various fail cases -ERR_SETUP_FAILED = -1 -ERR_USER_AT_END_STATE = -2 -ERR_USER_IN_WORKING_STATE = -3 -ERR_WHILE_RETIRING = -4 -ERR_BAD_LEARNER = -5 -ERR_UNKNOWN_STATE = -6 -ERR_BAD_CONFIG = -7 - -SCRIPT_SHORTNAME = 'Learner Retirement' -LOG = partial(_log, SCRIPT_SHORTNAME) -FAIL = partial(_fail, SCRIPT_SHORTNAME) -FAIL_EXCEPTION = partial(_fail_exception, SCRIPT_SHORTNAME) -CONFIG_OR_EXIT = partial(_config_or_exit, FAIL_EXCEPTION, ERR_BAD_CONFIG) -SETUP_ALL_APIS_OR_EXIT = partial(_setup_all_apis_or_exit, FAIL_EXCEPTION, ERR_SETUP_FAILED) - -logging.basicConfig(stream=sys.stdout, level=logging.INFO) - -# "Magic" states with special meaning, these are required to be in LMS -START_STATE = 'PENDING' -ERROR_STATE = 'ERRORED' -COMPLETE_STATE = 'COMPLETE' -ABORTED_STATE = 'ABORTED' -END_STATES = (ERROR_STATE, ABORTED_STATE, COMPLETE_STATE) - -# We'll store the access token here once retrieved -AUTH_HEADER = {} - - -def _get_learner_state_index_or_exit(learner, config): - """ - Returns the index in the ALL_STATES retirement state list, validating that it is in - an appropriate state to work on. - """ - try: - learner_state = learner['current_state']['state_name'] - learner_state_index = config['all_states'].index(learner_state) - - if learner_state in END_STATES: - FAIL(ERR_USER_AT_END_STATE, 'User already in end state: {}'.format(learner_state)) - - if learner_state in config['working_states']: - FAIL(ERR_USER_IN_WORKING_STATE, 'User is already in a working state! 
{}'.format(learner_state)) - - return learner_state_index - except KeyError: - FAIL(ERR_BAD_LEARNER, 'Bad learner response missing current_state or state_name: {}'.format(learner)) - except ValueError: - FAIL(ERR_UNKNOWN_STATE, 'Unknown learner retirement state for learner: {}'.format(learner)) - - -def _config_retirement_pipeline(config): - """ - Organizes the pipeline and populate the various state types - """ - # List of states where an API call is currently in progress - retirement_pipeline = config['retirement_pipeline'] - config['working_states'] = [state[0] for state in retirement_pipeline] - - # Create the full list of all of our states - config['all_states'] = [START_STATE] - for working in config['retirement_pipeline']: - config['all_states'].append(working[0]) - config['all_states'].append(working[1]) - for end in END_STATES: - config['all_states'].append(end) - - -def _get_learner_and_state_index_or_exit(config, username): - """ - Double-checks the current learner state, contacting LMS, and maps that state to its - index in the pipeline. Exits out if the learner is in an invalid state or not found - in LMS. - """ - try: - learner = config['LMS'].get_learner_retirement_state(username) - learner_state_index = _get_learner_state_index_or_exit(learner, config) - return learner, learner_state_index - except HttpDoesNotExistException: - FAIL(ERR_BAD_LEARNER, 'Learner {} not found. Please check that the learner is present in ' - 'UserRetirementStatus, is not already retired, ' - 'and is in an appropriate state to be acted upon.'.format(username)) - except Exception as exc: # pylint: disable=broad-except - FAIL_EXCEPTION(ERR_SETUP_FAILED, 'Unexpected error fetching user state!', str(exc)) - - -def _get_ecom_segment_id(config, learner): - """ - Calls Ecommerce to get the ecom-specific Segment tracking id that we need to retire. - This is only available from Ecommerce, unfortunately, and makes more sense to handle - here than to pass all of the config down to SegmentApi. - """ - try: - return config['ECOMMERCE'].get_tracking_key(learner) - except HttpDoesNotExistException: - LOG('Learner {} not found in Ecommerce. Setting Ecommerce Segment ID to None'.format(learner)) - return None - except Exception as exc: # pylint: disable=broad-except - FAIL_EXCEPTION(ERR_SETUP_FAILED, 'Unexpected error fetching Ecommerce tracking id!', str(exc)) - - -@click.command("retire_learner") -@click.option( - '--username', - help='The original username of the user to retire' -) -@click.option( - '--config_file', - help='File in which YAML config exists that overrides all other params.' 
-) -def retire_learner( - username, - config_file -): - """ - Retrieves a JWT token as the retirement service learner, then performs the retirement process as - defined in WORKING_STATE_ORDER - """ - LOG('Starting learner retirement for {} using config file {}'.format(username, config_file)) - - if not config_file: - FAIL(ERR_BAD_CONFIG, 'No config file passed in.') - - config = CONFIG_OR_EXIT(config_file) - _config_retirement_pipeline(config) - SETUP_ALL_APIS_OR_EXIT(config) - - learner, learner_state_index = _get_learner_and_state_index_or_exit(config, username) - - if config.get('fetch_ecommerce_segment_id', False): - learner['ecommerce_segment_id'] = _get_ecom_segment_id(config, learner) - - start_state = None - try: - for start_state, end_state, service, method in config['retirement_pipeline']: - # Skip anything that has already been done - if config['all_states'].index(start_state) < learner_state_index: - LOG('State {} completed in previous run, skipping'.format(start_state)) - continue - - LOG('Starting state {}'.format(start_state)) - - config['LMS'].update_learner_retirement_state(username, start_state, 'Starting: {}'.format(start_state)) - - # This does the actual API call - start_time = time() - response = getattr(config[service], method)(learner) - end_time = time() - - LOG('State {} completed in {} seconds'.format(start_state, end_time - start_time)) - - config['LMS'].update_learner_retirement_state( - username, - end_state, - 'Ending: {} with response:\n{}'.format(end_state, response) - ) - - learner_state_index += 1 - - LOG('Progressing to state {}'.format(end_state)) - - config['LMS'].update_learner_retirement_state(username, COMPLETE_STATE, 'Learner retirement complete.') - LOG('Retirement complete for learner {}'.format(username)) - except Exception as exc: # pylint: disable=broad-except - exc_msg = _get_error_str_from_exception(exc) - - try: - LOG('Error in retirement state {}: {}'.format(start_state, exc_msg)) - config['LMS'].update_learner_retirement_state(username, ERROR_STATE, exc_msg) - except Exception as update_exc: # pylint: disable=broad-except - LOG('Critical error attempting to change learner state to ERRORED: {}'.format(update_exc)) - - FAIL_EXCEPTION(ERR_WHILE_RETIRING, 'Error encountered in state "{}"'.format(start_state), exc) - - -if __name__ == '__main__': - # pylint: disable=unexpected-keyword-arg, no-value-for-parameter - retire_learner(auto_envvar_prefix='RETIREMENT') diff --git a/tubular/scripts/retirement_archive_and_cleanup.py b/tubular/scripts/retirement_archive_and_cleanup.py deleted file mode 100644 index 03ca93d4..00000000 --- a/tubular/scripts/retirement_archive_and_cleanup.py +++ /dev/null @@ -1,334 +0,0 @@ -#! /usr/bin/env python3 -""" -Command-line script to bulk archive and cleanup retired learners from LMS -""" - - -import datetime -import gzip -import json -import logging -import sys -import time -from functools import partial -from os import path - -import backoff -import boto3 -import click -from botocore.exceptions import BotoCoreError, ClientError -from six import text_type - -# Add top-level module path to sys.path before importing tubular code. 
-sys.path.append(path.dirname(path.dirname(path.abspath(__file__)))) - -# pylint: disable=wrong-import-position -from tubular.scripts.helpers import ( - _config_or_exit, _fail, _fail_exception, _log, _setup_lms_api_or_exit -) - - -SCRIPT_SHORTNAME = 'Archive and Cleanup' - -# Return codes for various fail cases -ERR_NO_CONFIG = -1 -ERR_BAD_CONFIG = -2 -ERR_FETCHING = -3 -ERR_ARCHIVING = -4 -ERR_DELETING = -5 -ERR_SETUP_FAILED = -5 -ERR_BAD_CLI_PARAM = -6 - -LOG = partial(_log, SCRIPT_SHORTNAME) -FAIL = partial(_fail, SCRIPT_SHORTNAME) -FAIL_EXCEPTION = partial(_fail_exception, SCRIPT_SHORTNAME) -CONFIG_OR_EXIT = partial(_config_or_exit, FAIL_EXCEPTION, ERR_BAD_CONFIG) -SETUP_LMS_OR_EXIT = partial(_setup_lms_api_or_exit, FAIL, ERR_SETUP_FAILED) - -DELAY = 10 - - -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) -logging.getLogger('boto').setLevel(logging.INFO) - - -def _fetch_learners_to_archive_or_exit(config, start_date, end_date, initial_state): - """ - Makes the call to fetch learners to be cleaned up, returns the list of learners or exits. - """ - LOG('Fetching users in state {} created from {} to {}'.format(initial_state, start_date, end_date)) - try: - learners = config['LMS'].get_learners_by_date_and_status(initial_state, start_date, end_date) - LOG('Successfully fetched {} learners'.format(str(len(learners)))) - return learners - except Exception as exc: # pylint: disable=broad-except - FAIL_EXCEPTION(ERR_FETCHING, 'Unexpected error occurred fetching users to update!', exc) - - -def _batch_learners(learners=None, batch_size=None): - """ - To avoid potentially overwheling the LMS with a large number of user retirements to - delete, create a list of smaller batches of users to iterate over. This has the - added benefit of reducing the amount of user retirement archive requests that can - get into a bad state should this script experience an error. - - Args: - learners (list): List of learners to portion into smaller batches (lists) - batch_size (int): The number of learners to portion into each batch. If this - parameter is not supplied, this function will return one batch containing - all of the learners supplied to it. - """ - if batch_size: - return [ - learners[i:i+batch_size] for i, _ in list(enumerate(learners))[::batch_size] - ] - else: - return [learners] - - -def _on_s3_backoff(details): - """ - Callback that is called when backoff... backs off - """ - LOG("Backing off {wait:0.1f} seconds after {tries} tries calling function {target}".format(**details)) - - -@backoff.on_exception( - backoff.expo, - ( - ClientError, - BotoCoreError - ), - on_backoff=lambda details: _on_s3_backoff(details), # pylint: disable=unnecessary-lambda, - max_time=120, # 2 minutes -) -def _upload_to_s3(config, filename, dry_run=False): - """ - Upload the archive file to S3 - """ - try: - datestr = datetime.datetime.now().strftime('%Y/%m/') - s3 = boto3.resource('s3') - bucket_name = config['s3_archive']['bucket_name'] - # Dry runs of this script should only generate the retirement archive file, not push it to s3. - bucket = s3.Bucket(bucket_name) - key = 'raw/' + datestr + filename - if dry_run: - LOG('Dry run. 
Skipping the step to upload data to {}'.format(key)) - return - else: - bucket.upload_file(filename, key) - LOG('Successfully uploaded retirement data to {}'.format(key)) - except Exception as exc: - LOG(text_type(exc)) - raise - - -def _format_datetime_for_athena(timestamp): - """ - Takes a JSON serialized timestamp string and returns a format of it that is queryable as a datetime in Athena - """ - return timestamp.replace('T', ' ').rstrip('Z') - - -def _archive_retirements_or_exit(config, learners, dry_run=False): - """ - Creates an archive file with all of the retirements and uploads it to S3 - - The format of learners from LMS should be a list of these: - { - 'id': 46, # This is the UserRetirementStatus ID! - 'user': - { - 'id': 5213599, # THIS is the LMS User ID - 'username': 'retired__user_88ad587896920805c26041a2e75c767c75471ee9', - 'email': 'retired__user_d08919da55a0e03c032425567e4a33e860488a96@retired.invalid', - 'profile': - { - 'id': 2842382, - 'name': '' - } - }, - 'current_state': - { - 'id': 41, - 'state_name': 'COMPLETE', - 'state_execution_order': 13 - }, - 'last_state': { - 'id': 1, - 'state_name': 'PENDING', - 'state_execution_order': 1 - }, - 'created': '2018-10-18T20:35:52.349757Z', # This is the UserRetirementStatus creation date - 'modified': '2018-10-18T20:35:52.350050Z', # This is the UserRetirementStatus last touched date - 'original_username': 'retirement_test', - 'original_email': 'orig@foo.invalid', - 'original_name': 'Retirement Test', - 'retired_username': 'retired__user_88ad587896920805c26041a2e75c767c75471ee9', - 'retired_email': 'retired__user_d08919da55a0e03c032425567e4a33e860488a96@retired.invalid' - } - """ - LOG('Archiving retirements for {} learners to {}'.format(len(learners), config['s3_archive']['bucket_name'])) - try: - now = _get_utc_now() - filename = 'retirement_archive_{}.json.gz'.format(now.strftime('%Y_%d_%m_%H_%M_%S')) - LOG('Creating retirement archive file {}'.format(filename)) - - # The file format is one JSON object per line with the newline as a separator. This allows for - # easy queries via AWS Athena if we need to confirm learner deletion. - with gzip.open(filename, 'wt') as out: - for learner in learners: - user = { - 'user_id': learner['user']['id'], - 'original_username': learner['original_username'], - 'original_email': learner['original_email'], - 'original_name': learner['original_name'], - 'retired_username': learner['retired_username'], - 'retired_email': learner['retired_email'], - 'retirement_request_date': _format_datetime_for_athena(learner['created']), - 'last_modified_date': _format_datetime_for_athena(learner['modified']), - } - json.dump(user, out) - out.write("\n") - if dry_run: - LOG('Dry run. 
Logging the contents of {} for debugging'.format(filename)) - with gzip.open(filename, 'r') as archive_file: - for line in archive_file.readlines(): - LOG(line) - _upload_to_s3(config, filename, dry_run) - except Exception as exc: # pylint: disable=broad-except - FAIL_EXCEPTION(ERR_ARCHIVING, 'Unexpected error occurred archiving retirements!', exc) - - -def _cleanup_retirements_or_exit(config, learners): - """ - Bulk deletes the retirements for this run - """ - LOG('Cleaning up retirements for {} learners'.format(len(learners))) - try: - usernames = [l['original_username'] for l in learners] - config['LMS'].bulk_cleanup_retirements(usernames) - except Exception as exc: # pylint: disable=broad-except - FAIL_EXCEPTION(ERR_DELETING, 'Unexpected error occurred deleting retirements!', exc) - -def _get_utc_now(): - """ - Helper function only used to make unit test mocking/patching easier. - """ - return datetime.datetime.utcnow() - - -@click.command("archive_and_cleanup") -@click.option( - '--config_file', - help='YAML file that contains retirement-related configuration for this environment.' -) -@click.option( - '--cool_off_days', - help='Number of days a retirement should exist before being archived and deleted.', - type=int, - default=37 # 7 days before retirement, 30 after -) -@click.option( - '--dry_run', - help=''' - Should this script be run in a dry-run mode, in which generated retirement - archive files are not pushed to s3 and retirements are not cleaned up in the LMS - ''', - type=bool, - default=False -) -@click.option( - '--start_date', - help=''' - Start of window used to select user retirements for archival. Only user retirements - added to the retirement queue after this date will be processed. - ''', - type=click.DateTime(formats=['%Y-%m-%d']) -) -@click.option( - '--end_date', - help=''' - End of window used to select user retirments for archival. Only user retirments - added to the retirement queue before this date will be processed. In the case that - this date is more recent than the value specified in the `cool_off_days` parameter, - an error will be thrown. If this parameter is not used, the script will default to - using an end_date based upon the `cool_off_days` parameter. 
- ''', - type=click.DateTime(formats=['%Y-%m-%d']) -) -@click.option( - '--batch_size', - help='Number of user retirements to process', - type=int -) -def archive_and_cleanup(config_file, cool_off_days, dry_run, start_date, end_date, batch_size): - """ - Cleans up UserRetirementStatus rows in LMS by: - 1- Getting all rows currently in COMPLETE that were created --cool_off_days ago or more, - unless a specific timeframe is specified - 2- Archiving them to S3 in an Athena-queryable format - 3- Deleting them from LMS (by username) - """ - try: - LOG('Starting bulk update script: Config: {}'.format(config_file)) - - if not config_file: - FAIL(ERR_NO_CONFIG, 'No config file passed in.') - - config = CONFIG_OR_EXIT(config_file) - SETUP_LMS_OR_EXIT(config) - - if not start_date: - # This date is just a bogus "earliest possible value" since the call requires one - start_date = datetime.datetime.strptime('2018-01-01', '%Y-%m-%d') - if end_date: - if end_date > _get_utc_now() - datetime.timedelta(days=cool_off_days): - FAIL(ERR_BAD_CLI_PARAM, 'End date cannot occur within the cool_off_days period') - else: - # Set an end_date of `cool_off_days` days before the time that this script is run - end_date = _get_utc_now() - datetime.timedelta(days=cool_off_days) - - if start_date >= end_date: - FAIL(ERR_BAD_CLI_PARAM, 'Conflicting start and end dates passed on CLI') - - - LOG( - 'Fetching retirements for learners that have a COMPLETE status and were created ' - 'between {} and {}.'.format( - start_date, end_date - ) - ) - learners = _fetch_learners_to_archive_or_exit( - config, start_date, end_date, 'COMPLETE' - ) - - learners_to_process = _batch_learners(learners, batch_size) - num_batches = len(learners_to_process) - - if learners_to_process: - for index, batch in enumerate(learners_to_process): - LOG( - 'Processing batch {} out of {} of user retirement requests'.format( - str(index + 1), str(num_batches) - ) - ) - _archive_retirements_or_exit(config, batch, dry_run) - - if dry_run: - LOG('This is a dry-run. Exiting before any retirements are cleaned up') - else: - _cleanup_retirements_or_exit(config, batch) - LOG('Archive and cleanup complete for batch #{}'.format(str(index + 1))) - time.sleep(DELAY) - else: - LOG('No learners found!') - except Exception as exc: - LOG(text_type(exc)) - raise - - -if __name__ == '__main__': - # pylint: disable=unexpected-keyword-arg, no-value-for-parameter - archive_and_cleanup(auto_envvar_prefix='RETIREMENT') diff --git a/tubular/scripts/retirement_bulk_status_update.py b/tubular/scripts/retirement_bulk_status_update.py deleted file mode 100755 index a6b41676..00000000 --- a/tubular/scripts/retirement_bulk_status_update.py +++ /dev/null @@ -1,155 +0,0 @@ -#! /usr/bin/env python3 -""" -Command-line script to bulk update retirement states in LMS -""" - - -from datetime import datetime -from functools import partial -from os import path -import logging -import sys - -import click -from six import text_type - -# Add top-level module path to sys.path before importing tubular code. 
-sys.path.append(path.dirname(path.dirname(path.abspath(__file__)))) - -# pylint: disable=wrong-import-position -from tubular.scripts.helpers import ( - _config_or_exit, - _fail, - _fail_exception, - _log, - _setup_lms_api_or_exit -) - - -SCRIPT_SHORTNAME = 'Bulk Status' - -# Return codes for various fail cases -ERR_NO_CONFIG = -1 -ERR_BAD_CONFIG = -2 -ERR_FETCHING = -3 -ERR_UPDATING = -4 -ERR_SETUP_FAILED = -5 - -LOG = partial(_log, SCRIPT_SHORTNAME) -FAIL = partial(_fail, SCRIPT_SHORTNAME) -FAIL_EXCEPTION = partial(_fail_exception, SCRIPT_SHORTNAME) -CONFIG_OR_EXIT = partial(_config_or_exit, FAIL_EXCEPTION, ERR_BAD_CONFIG) -SETUP_LMS_OR_EXIT = partial(_setup_lms_api_or_exit, FAIL, ERR_SETUP_FAILED) - - -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) - - -def validate_dates(_, __, value): - """ - Click input validator for date options. - - Validates string format - - Transforms the string into a datetime.Date object - - Validates the date is less than or equal to today - - Returns the Date, or raises a click.BadParameter - """ - try: - date = datetime.strptime(value, '%Y-%m-%d').date() - if date > datetime.now().date(): - raise ValueError() - return date - except ValueError: - raise click.BadParameter('Dates need to be in the format of YYYY-MM-DD and today or earlier.') - - -def _fetch_learners_to_update_or_exit(config, start_date, end_date, initial_state): - """ - Makes the call to fetch learners to be bulk updated, returns the list of learners - or exits. - """ - LOG('Fetching users in state {} created from {} to {}'.format(initial_state, start_date, end_date)) - try: - return config['LMS'].get_learners_by_date_and_status(initial_state, start_date, end_date) - except Exception as exc: # pylint: disable=broad-except - FAIL_EXCEPTION(ERR_FETCHING, 'Unexpected error occurred fetching users to update!', exc) - - -def _update_learners_or_exit(config, learners, new_state=None, rewind_state=False): - """ - Iterates the list of learners, setting each to the new state. On any error - it will exit the script. If rewind_state is set to True then the learner - will be reset to their previous state. - """ - if (not new_state and not rewind_state) or (rewind_state and new_state): - FAIL(ERR_BAD_CONFIG, "You must specify either the boolean rewind_state or a new state to set learners to.") - LOG('Updating {} learners to {}'.format(len(learners), new_state)) - try: - for learner in learners: - if rewind_state: - new_state = learner['last_state']['state_name'] - config['LMS'].update_learner_retirement_state( - learner['original_username'], - new_state, - 'Force updated via retirement_bulk_status_update Tubular script', - force=True - ) - except Exception as exc: # pylint: disable=broad-except - FAIL_EXCEPTION(ERR_UPDATING, 'Unexpected error occurred updating users!', exc) - - -@click.command("update_statuses") -@click.option( - '--config_file', - help='YAML file that contains retirement-related configuration for this environment.' -) -@click.option( - '--initial_state', - help='Find learners in this retirement state. Use the state name ex: PENDING, COMPLETE' -) -@click.option( - '--new_state', - help='Set any found learners to this new state. Use the state name ex: PENDING, COMPLETE', - default=None -) -@click.option( - '--start_date', - callback=validate_dates, - help='(YYYY-MM-DD) Earliest creation date for retirements to act on.' -) -@click.option( - '--end_date', - callback=validate_dates, - help='(YYYY-MM-DD) Latest creation date for retirements to act on.' 
-) -@click.option( - '--rewind-state', - help='Rewinds to the last_state for learners. Useful for resetting ERRORED users', - default=False, - is_flag=True -) -def update_statuses(config_file, initial_state, new_state, start_date, end_date, rewind_state): - """ - Bulk-updates user retirement statuses which are in the specified state -and- retirement was - requested between a start date and end date. - """ - try: - LOG('Starting bulk update script: Config: {}'.format(config_file)) - - if not config_file: - FAIL(ERR_NO_CONFIG, 'No config file passed in.') - - config = CONFIG_OR_EXIT(config_file) - SETUP_LMS_OR_EXIT(config) - - learners = _fetch_learners_to_update_or_exit(config, start_date, end_date, initial_state) - _update_learners_or_exit(config, learners, new_state, rewind_state) - - LOG('Bulk update complete') - except Exception as exc: - print(text_type(exc)) - raise - - -if __name__ == '__main__': - # pylint: disable=unexpected-keyword-arg, no-value-for-parameter - update_statuses(auto_envvar_prefix='RETIREMENT') diff --git a/tubular/scripts/structures.py b/tubular/scripts/structures.py deleted file mode 100644 index b37ec0d4..00000000 --- a/tubular/scripts/structures.py +++ /dev/null @@ -1,195 +0,0 @@ -#! /usr/bin/env python3 -""" -Script to detect and prune old Structure documents from the "Split" Modulestore -MongoDB (edxapp.modulestore.structures by default). See docstring/help for the -"make_plan" and "prune" commands for more details. -""" - -import logging -import os -import sys - -import click -import click_log - -# Add top-level module path to sys.path before importing tubular code. -sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -from tubular.splitmongo import ChangePlan, SplitMongoBackend # pylint: disable=wrong-import-position - -LOG = logging.getLogger('structures') -click_log.basic_config(LOG) - - -@click.group() -@click.option( - '--connection', - default="mongodb://localhost:27017", - help=( - 'Connection string to the target mongo database. This defaults to ' - 'localhost without password (that will work against devstack). ' - 'You may need to use urllib.parse.quote_plus() to percent-escape ' - 'your username and password.' - ) -) -@click.option( - '--database-name', - default='edxapp', - help='Name of the edX Mongo database containing the course structures to prune.' -) -@click.pass_context -def cli(ctx, connection, database_name): - """ - Recover space on MongoDB for edx-platform by deleting unreachable, - historical course content data. To use, first make a change plan with the - "make_plan" command, and then execute that plan against the database with - the "prune" command. - - This script provides logic to clean up old, unused course content data for - the DraftVersioningModuleStore modulestore, more commonly referred to as the - "Split Mongo" or "Split" modulestore (DraftVersioningModuleStore subclasses - SplitMongoModuleStore). All courses and assets that have newer style locator - keys use DraftVersioningModuleStore. These keys start with "course-v1:", - "ccx-v1:", or "block-v1:". Studio authored content data for this modulestore - is saved as immutable data structures. The edx-platform code never cleans up - old data however, meaning there is an unbounded history of a course's - content revisions stored in MongoDB. - - The older modulestore is DraftModuleStore, sometimes called "Old Mongo". - This code does not address that modulestore in any way. 
That modulestore - handles courses that use the old "/" separator, such as - "MITx/6.002x/2012_Spring", as well as assets starting with "i4x://". - """ - if ctx.obj is None: - ctx.obj = dict() - - ctx.obj['BACKEND'] = SplitMongoBackend(connection, database_name) - - -@cli.command("make_plan") -@click_log.simple_verbosity_option(default='INFO') -@click.argument('plan_file', type=click.File('w')) -@click.option( - '--details', - type=click.File('w'), - default=None, - help="Name of file to write the human-readable details of the Change Plan." -) -@click.option( - '--retain', - default=2, - type=click.IntRange(0, None), - help=("The maximum number of intermediate structures to preserve for any " - "single branch of an active version. This value does not include the " - "active or original structures (those are always preserved). Defaults " - "to 2. Put 0 here if you want to prune as much as possible.") -) -@click.option( - '--delay', - default=15000, - type=click.IntRange(0, None), - help=("Delay in milliseconds between queries to fetch structures from MongoDB " - "during plan creation. Tune to adjust load on the database.") -) -@click.option( - '--batch-size', - default=10000, - type=click.IntRange(1, None), - help="How many Structures do we fetch at a time?" -) -@click.option( - '--ignore-missing/--no-ignore-missing', - default=False, - help=("Force plan creation, even if missing structures are found. " - "Should repair invalid ids by repointing to original. " - "Review of plan highly recommended") -) -@click.option( - '--dump-structures/--no-dump-structures', - default=False, - help="Dump all strucutres to stderr for debugging or recording state before cleanup." -) -@click.pass_context -def make_plan(ctx, plan_file, details, retain, delay, batch_size, ignore_missing, dump_structures): - """ - Create a Change Plan JSON file describing the operations needed to prune the - database. This command is read-only and does not alter the database. - - The Change Plan JSON is a dictionary with two keys: - - "delete" - A sorted array of Structure document IDs to delete. Since MongoDB - object IDs are created in ascending order by timestamp, this means that the - oldest documents come earlier in the list. - - "update_parents" - A list of [Structure ID, New Parent/Previous ID] pairs. - This is used to re-link the oldest preserved Intermediate Structure back to - the Original Structure, so that we don't leave the database in a state where - a Structure's "previous_version" points to a deleted Structure. - - Specifying a --details file will generate a more verbose, human-readable - text description of the Change Plan for verification purposes. The details - file will only display Structures that are reachable from an Active Version, - so any Structures that are "orphaned" as a result of partial runs of this - script or Studio race conditions will not be reflected. That being said, - orphaned Structures are detected and properly noted in the Change Plan JSON. - """ - structures_graph = ctx.obj['BACKEND'].structures_graph(delay / 1000.0, batch_size) - - # This will create the details file as a side-effect, if specified. 
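# Illustrative aside, not part of the original structures.py: a minimal sketch of the
# Change Plan shape described in the make_plan docstring above. The ObjectId strings and
# the EXAMPLE_CHANGE_PLAN name are invented here purely for illustration; roughly this
# structure is what gets serialized to the plan_file below.
EXAMPLE_CHANGE_PLAN = {
    "delete": [
        # Sorted Structure document IDs to delete, oldest (lowest ObjectId) first.
        "5d0000000000000000000001",
        "5d0000000000000000000002",
    ],
    "update_parents": [
        # [Structure ID, new parent/previous ID] pairs that re-link the oldest kept
        # intermediate Structure back to its Original Structure.
        ["5d0000000000000000000003", "5d0000000000000000000000"],
    ],
}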
- change_plan = ChangePlan.create(structures_graph, retain, ignore_missing, dump_structures, details) - change_plan.dump(plan_file) - - -@cli.command() -@click_log.simple_verbosity_option(default='INFO') -@click.argument('plan_file', type=click.File('r')) -@click.option( - '--delay', - default=15000, - type=click.IntRange(0, None), - help=("Delay in milliseconds between batch deletions during pruning. Tune to " - "adjust load on the database.") -) -@click.option( - '--batch-size', - default=1000, - type=click.IntRange(1, None), - help=("How many Structures do we delete at a time? Tune to adjust load on " - "the database.") -) -@click.option( - '--start', - default=None, - help=("Structure ID to start deleting from. Specifying a Structure ID that " - "is not in the Change Plan is an error. Specifying a Structure ID that " - "has already been deleted is NOT an error, so it's safe to re-run.") -) -@click.pass_context -def prune(ctx, plan_file, delay, batch_size, start): - """ - Prune the MongoDB database according to a Change Plan file. - - This command tries to be as safe as possible. It executes parent updates - before deletes, so an interruption at any point should be safe in that it - won't leave the structure graphs in an inconsistent state. It should also - be safe to resume pruning with the same Change Plan in the event of an - interruption. - - It's also safe to run while Studio is still operating, though you should be - careful to test and tweak the delay and batch_size options to throttle load - on your database. - """ - change_plan = ChangePlan.load(plan_file) - if start is not None and start not in change_plan.delete: - raise click.BadParameter( - "{} is not in the Change Plan {}".format( - start, click.format_filename(plan_file.name) - ), - param_hint='--start' - ) - ctx.obj['BACKEND'].update(change_plan, delay / 1000.0, batch_size, start) - - -if __name__ == '__main__': - # pylint doesn't grok click magic, but this is straight from their docs... - cli(obj={}) # pylint: disable=no-value-for-parameter, unexpected-keyword-arg diff --git a/tubular/tests/mixins.py b/tubular/tests/mixins.py deleted file mode 100644 index 49aba2c4..00000000 --- a/tubular/tests/mixins.py +++ /dev/null @@ -1,23 +0,0 @@ -from urllib.parse import urljoin - -import responses - -from tubular import edx_api - -FAKE_ACCESS_TOKEN = 'THIS_IS_A_JWT' -CONTENT_TYPE = 'application/json' - - -class OAuth2Mixin: - @staticmethod - def mock_access_token_response(status=200): - """ - Mock POST requests to retrieve an access token for this site's service user. - """ - responses.add( - responses.POST, - urljoin('http://localhost:18000/', edx_api.OAUTH_ACCESS_TOKEN_URL), - status=status, - json={'access_token': FAKE_ACCESS_TOKEN, 'expires_in': 60}, - content_type=CONTENT_TYPE - ) diff --git a/tubular/tests/test_amplitude.py b/tubular/tests/test_amplitude.py deleted file mode 100644 index 05d74657..00000000 --- a/tubular/tests/test_amplitude.py +++ /dev/null @@ -1,80 +0,0 @@ -""" -Tests for the Amplitude API functionality -""" -import ddt -import os -import logging -import unittest -from unittest import mock - -import requests_mock - -MAX_ATTEMPTS = int(os.environ.get("RETRY_MAX_ATTEMPTS", 5)) -from tubular.amplitude_api import AmplitudeApi, AmplitudeException, AmplitudeRecoverableException - - -@ddt.ddt -@requests_mock.Mocker() -class TestAmplitude(unittest.TestCase): - """ - Class containing tests of all code interacting with Amplitude. 
- """ - def setUp(self): - super().setUp() - self.user = {"user": {"id": "1234"}} - self.amplitude = AmplitudeApi("test-api-key", "test-secret-key") - - def _mock_delete(self, req_mock, status_code, message=None): - """ - Send a mock request with dummy headers and status code. - - """ - req_mock.post( - "https://amplitude.com/api/2/deletions/users", - headers = {"Content-Type": "application/json"}, - json = {}, - status_code = status_code - ) - - def test_delete_happy_path(self, req_mock): - """ - This test pass status_code 200 to mock_delete see how AmplitudeApi respond in happy path. - - """ - self._mock_delete(req_mock, 200) - logger = logging.getLogger("tubular.amplitude_api") - with mock.patch.object(logger, "info") as mock_info: - self.amplitude.delete_user(self.user) - - self.assertEqual(mock_info.call_args, [("Amplitude user deletion succeeded",)]) - - self.assertEqual(len(req_mock.request_history), 1) - request = req_mock.request_history[0] - self.assertEqual(request.json(), {"user_ids": ["1234"], 'ignore_invalid_id': 'true', "requester": "user-retirement-pipeline"}) - - def test_delete_fatal_error(self, req_mock): - """ - This test pass status_code 404 to see how AmplitudeApi respond in fatal error case. - - """ - self._mock_delete(req_mock, 404) - message=None - logger = logging.getLogger("tubular.amplitude_api") - with mock.patch.object(logger, "error") as mock_error: - with self.assertRaises(AmplitudeException) as exc: - self.amplitude.delete_user(self.user) - error = "Amplitude user deletion failed due to {message}".format(message=message) - self.assertEqual(mock_error.call_args, [(error,)]) - self.assertEqual(str(exc.exception), error) - - @ddt.data(429, 500) - def test_delete_recoverable_error(self, status_code, req_mock): - """ - This test pass status_code 429 and 500 to see how AmplitudeApi respond to recoverable cases. - - """ - self._mock_delete(req_mock, status_code) - - with self.assertRaises(AmplitudeRecoverableException): - self.amplitude.delete_user(self.user) - self.assertEqual(len(req_mock.request_history), MAX_ATTEMPTS) diff --git a/tubular/tests/test_braze.py b/tubular/tests/test_braze.py deleted file mode 100644 index 0a178f8a..00000000 --- a/tubular/tests/test_braze.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Tests for the Braze API functionality -""" -import ddt -import os -import logging -import unittest -from unittest import mock - -import requests_mock - -os.environ['RETRY_BRAZE_MAX_ATTEMPTS'] = '2' -from tubular.braze_api import BrazeApi, BrazeException, BrazeRecoverableException - - -@ddt.ddt -@requests_mock.Mocker() -class TestBraze(unittest.TestCase): - """ - Class containing tests of all code interacting with Braze. 
- """ - def setUp(self): - super().setUp() - self.learner = {'user': {'id': 1234}} - self.braze = BrazeApi('test-key', 'test-instance') - - def _mock_delete(self, req_mock, status_code, message=None): - req_mock.post( - 'https://rest.test-instance.braze.com/users/delete', - request_headers={'Authorization': 'Bearer test-key'}, - json={'message': message} if message else {}, - status_code=status_code - ) - - def test_delete_happy_path(self, req_mock): - self._mock_delete(req_mock, 200) - - logger = logging.getLogger('tubular.braze_api') - with mock.patch.object(logger, 'info') as mock_info: - self.braze.delete_user(self.learner) - - self.assertEqual(mock_info.call_args, [('Braze user deletion succeeded',)]) - - self.assertEqual(len(req_mock.request_history), 1) - request = req_mock.request_history[0] - self.assertEqual(request.json(), {'external_ids': [1234]}) - - def test_delete_fatal_error(self, req_mock): - self._mock_delete(req_mock, 404, message='Test Error Message') - - logger = logging.getLogger('tubular.braze_api') - with mock.patch.object(logger, 'error') as mock_error: - with self.assertRaises(BrazeException) as exc: - self.braze.delete_user(self.learner) - - error = 'Braze user deletion failed due to Test Error Message' - self.assertEqual(mock_error.call_args, [(error,)]) - self.assertEqual(str(exc.exception), error) - - @ddt.data(429, 500) - def test_delete_recoverable_error(self, status_code, req_mock): - self._mock_delete(req_mock, status_code) - - with self.assertRaises(BrazeRecoverableException): - self.braze.delete_user(self.learner) - - self.assertEqual(len(req_mock.request_history), 2) diff --git a/tubular/tests/test_data/uploading.txt b/tubular/tests/test_data/uploading.txt deleted file mode 100644 index eb2b87e6..00000000 --- a/tubular/tests/test_data/uploading.txt +++ /dev/null @@ -1 +0,0 @@ -Upload this file on s3 in tests. \ No newline at end of file diff --git a/tubular/tests/test_edx_api.py b/tubular/tests/test_edx_api.py deleted file mode 100644 index 8db6e27e..00000000 --- a/tubular/tests/test_edx_api.py +++ /dev/null @@ -1,560 +0,0 @@ -""" -Tests for edX API calls. -""" -import unittest -from urllib.parse import urljoin - -import requests -import responses -from ddt import data, ddt, unpack -from mock import DEFAULT, patch -from requests.exceptions import ConnectionError, HTTPError -from responses import GET, PATCH, POST, matchers -from responses.registries import OrderedRegistry - -from tubular import edx_api -from tubular.tests.mixins import OAuth2Mixin -from tubular.tests.retirement_helpers import ( - FAKE_DATETIME_OBJECT, - FAKE_ORIGINAL_USERNAME, - FAKE_RESPONSE_MESSAGE, - FAKE_USERNAME_MAPPING, - FAKE_USERNAMES, - TEST_RETIREMENT_QUEUE_STATES, - TEST_RETIREMENT_STATE, - get_fake_user_retirement -) - - -class BackoffTriedException(Exception): - """ - Raise this from a backoff handler to indicate that backoff was tried. - """ - - -@ddt -class TestLmsApi(OAuth2Mixin, unittest.TestCase): - """ - Test the edX LMS API client. 
- """ - - @responses.activate(registry=OrderedRegistry) - def setUp(self): - super().setUp() - self.mock_access_token_response() - self.lms_base_url = 'http://localhost:18000/' - self.lms_api = edx_api.LmsApi( - self.lms_base_url, - self.lms_base_url, - 'the_client_id', - 'the_client_secret' - ) - - @patch.object(edx_api.LmsApi, 'learners_to_retire') - def test_learners_to_retire(self, mock_method): - params = { - 'states': TEST_RETIREMENT_QUEUE_STATES, - 'cool_off_days': 365, - } - responses.add( - GET, - urljoin(self.lms_base_url, 'api/user/v1/accounts/retirement_queue/'), - match=[matchers.query_param_matcher(params)], - ) - self.lms_api.learners_to_retire( - TEST_RETIREMENT_QUEUE_STATES, cool_off_days=365) - mock_method.assert_called_once_with( - TEST_RETIREMENT_QUEUE_STATES, cool_off_days=365) - - @patch.object(edx_api.LmsApi, 'get_learners_by_date_and_status') - def test_get_learners_by_date_and_status(self, mock_method): - query_params = { - 'start_date': FAKE_DATETIME_OBJECT.strftime('%Y-%m-%d'), - 'end_date': FAKE_DATETIME_OBJECT.strftime('%Y-%m-%d'), - 'state': TEST_RETIREMENT_STATE, - } - responses.add( - GET, - urljoin(self.lms_base_url, 'api/user/v1/accounts/retirements_by_status_and_date/'), - match=[matchers.query_param_matcher(query_params)] - ) - self.lms_api.get_learners_by_date_and_status( - state_to_request=TEST_RETIREMENT_STATE, - start_date=FAKE_DATETIME_OBJECT, - end_date=FAKE_DATETIME_OBJECT - ) - mock_method.assert_called_once_with( - state_to_request=TEST_RETIREMENT_STATE, - start_date=FAKE_DATETIME_OBJECT, - end_date=FAKE_DATETIME_OBJECT - ) - - @patch.object(edx_api.LmsApi, 'get_learner_retirement_state') - def test_get_learner_retirement_state(self, mock_method): - responses.add( - GET, - urljoin(self.lms_base_url, f'api/user/v1/accounts/{FAKE_ORIGINAL_USERNAME}/retirement_status/'), - ) - self.lms_api.get_learner_retirement_state( - username=FAKE_ORIGINAL_USERNAME - ) - mock_method.assert_called_once_with( - username=FAKE_ORIGINAL_USERNAME - ) - - @patch.object(edx_api.LmsApi, 'update_learner_retirement_state') - def test_update_leaner_retirement_state(self, mock_method): - json_data = { - 'username': FAKE_ORIGINAL_USERNAME, - 'new_state': TEST_RETIREMENT_STATE, - 'response': FAKE_RESPONSE_MESSAGE, - } - responses.add( - PATCH, - urljoin(self.lms_base_url, 'api/user/v1/accounts/update_retirement_status/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.lms_api.update_learner_retirement_state( - username=FAKE_ORIGINAL_USERNAME, - new_state_name=TEST_RETIREMENT_STATE, - message=FAKE_RESPONSE_MESSAGE - ) - mock_method.assert_called_once_with( - username=FAKE_ORIGINAL_USERNAME, - new_state_name=TEST_RETIREMENT_STATE, - message=FAKE_RESPONSE_MESSAGE - ) - - @data( - { - 'api_url': 'api/user/v1/accounts/deactivate_logout/', - 'mock_method': 'retirement_deactivate_logout', - 'method': 'POST', - }, - { - 'api_url': 'api/discussion/v1/accounts/retire_forum/', - 'mock_method': 'retirement_retire_forum', - 'method': 'POST', - }, - { - 'api_url': 'api/user/v1/accounts/retire_mailings/', - 'mock_method': 'retirement_retire_mailings', - 'method': 'POST', - }, - { - 'api_url': 'api/enrollment/v1/unenroll/', - 'mock_method': 'retirement_unenroll', - 'method': 'POST', - }, - { - 'api_url': 'api/edxnotes/v1/retire_user/', - 'mock_method': 'retirement_retire_notes', - 'method': 'POST', - }, - { - 'api_url': 'api/user/v1/accounts/retire_misc/', - 'mock_method': 'retirement_lms_retire_misc', - 'method': 'POST', - }, - { - 'api_url': 'api/user/v1/accounts/retire/', - 
'mock_method': 'retirement_lms_retire', - 'method': 'POST', - }, - { - 'api_url': 'api/user/v1/accounts/retirement_partner_report/', - 'mock_method': 'retirement_partner_queue', - 'method': 'PUT', - }, - ) - @unpack - @patch.multiple( - 'tubular.edx_api.LmsApi', - retirement_deactivate_logout=DEFAULT, - retirement_retire_forum=DEFAULT, - retirement_retire_mailings=DEFAULT, - retirement_unenroll=DEFAULT, - retirement_retire_notes=DEFAULT, - retirement_lms_retire_misc=DEFAULT, - retirement_lms_retire=DEFAULT, - retirement_partner_queue=DEFAULT, - ) - def test_learner_retirement(self, api_url, mock_method, method, **kwargs): - json_data = { - 'username': FAKE_ORIGINAL_USERNAME, - } - responses.add( - method, - urljoin(self.lms_base_url, api_url), - match=[matchers.json_params_matcher(json_data)] - ) - getattr(self.lms_api, mock_method)(get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME)) - kwargs[mock_method].assert_called_once_with(get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME)) - - @patch.object(edx_api.LmsApi, 'retirement_partner_report') - def test_retirement_partner_report(self, mock_method): - responses.add( - POST, - urljoin(self.lms_base_url, 'api/user/v1/accounts/retirement_partner_report/') - ) - self.lms_api.retirement_partner_report( - learner=get_fake_user_retirement( - original_username=FAKE_ORIGINAL_USERNAME - ) - ) - mock_method.assert_called_once_with( - learner=get_fake_user_retirement( - original_username=FAKE_ORIGINAL_USERNAME - ) - ) - - @patch.object(edx_api.LmsApi, 'retirement_partner_cleanup') - def test_retirement_partner_cleanup(self, mock_method): - json_data = FAKE_USERNAMES - responses.add( - POST, - urljoin(self.lms_base_url, 'api/user/v1/accounts/retirement_partner_report_cleanup/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.lms_api.retirement_partner_cleanup( - usernames=FAKE_USERNAMES - ) - mock_method.assert_called_once_with( - usernames=FAKE_USERNAMES - ) - - @patch.object(edx_api.LmsApi, 'retirement_retire_proctoring_data') - def test_retirement_retire_proctoring_data(self, mock_method): - learner = get_fake_user_retirement() - responses.add( - POST, - urljoin(self.lms_base_url, f"api/edx_proctoring/v1/retire_user/{learner['user']['id']}/"), - ) - self.lms_api.retirement_retire_proctoring_data() - mock_method.assert_called_once() - - @patch.object(edx_api.LmsApi, 'retirement_retire_proctoring_backend_data') - def test_retirement_retire_proctoring_backend_data(self, mock_method): - learner = get_fake_user_retirement() - responses.add( - POST, - urljoin(self.lms_base_url, f"api/edx_proctoring/v1/retire_backend_user/{learner['user']['id']}/"), - ) - self.lms_api.retirement_retire_proctoring_backend_data() - mock_method.assert_called_once() - - @patch.object(edx_api.LmsApi, 'replace_lms_usernames') - def test_replace_lms_usernames(self, mock_method): - json_data = { - 'username_mappings': FAKE_USERNAME_MAPPING - } - responses.add( - POST, - urljoin(self.lms_base_url, 'api/user/v1/accounts/replace_usernames/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.lms_api.replace_lms_usernames( - username_mappings=FAKE_USERNAME_MAPPING - ) - mock_method.assert_called_once_with( - username_mappings=FAKE_USERNAME_MAPPING - ) - - @patch.object(edx_api.LmsApi, 'replace_forums_usernames') - def test_replace_forums_usernames(self, mock_method): - json_data = { - 'username_mappings': FAKE_USERNAME_MAPPING - } - responses.add( - POST, - urljoin(self.lms_base_url, 
'api/discussion/v1/accounts/replace_usernames/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.lms_api.replace_forums_usernames( - username_mappings=FAKE_USERNAME_MAPPING - ) - mock_method.assert_called_once_with( - username_mappings=FAKE_USERNAME_MAPPING - ) - - @data(504, 500) - @patch('tubular.edx_api._backoff_handler') - @patch.object(edx_api.LmsApi, 'learners_to_retire') - def test_retrieve_learner_queue_backoff( - self, - svr_status_code, - mock_backoff_handler, - mock_learners_to_retire - ): - mock_backoff_handler.side_effect = BackoffTriedException - params = { - 'states': TEST_RETIREMENT_QUEUE_STATES, - 'cool_off_days': 365, - } - response = requests.Response() - response.status_code = svr_status_code - responses.add( - GET, - urljoin(self.lms_base_url, 'api/user/v1/accounts/retirement_queue/'), - status=200, - match=[matchers.query_param_matcher(params)], - ) - - mock_learners_to_retire.side_effect = HTTPError(response=response) - with self.assertRaises(BackoffTriedException): - self.lms_api.learners_to_retire( - TEST_RETIREMENT_QUEUE_STATES, cool_off_days=365) - - @data(104) - @responses.activate - @patch('tubular.edx_api._backoff_handler') - @patch.object(edx_api.LmsApi, 'retirement_partner_cleanup') - def test_retirement_partner_cleanup_backoff_on_connection_error( - self, - svr_status_code, - mock_backoff_handler, - mock_retirement_partner_cleanup - ): - mock_backoff_handler.side_effect = BackoffTriedException - response = requests.Response() - response.status_code = svr_status_code - mock_retirement_partner_cleanup.retirement_partner_cleanup.side_effect = ConnectionError( - response=response - ) - with self.assertRaises(BackoffTriedException): - self.lms_api.retirement_partner_cleanup([{'original_username': 'test'}]) - - -class TestEcommerceApi(OAuth2Mixin, unittest.TestCase): - """ - Test the edX Ecommerce API client. 
- """ - - @responses.activate(registry=OrderedRegistry) - def setUp(self): - super().setUp() - self.mock_access_token_response() - self.lms_base_url = 'http://localhost:18000/' - self.ecommerce_base_url = 'http://localhost:18130/' - self.ecommerce_api = edx_api.EcommerceApi( - self.lms_base_url, - self.ecommerce_base_url, - 'the_client_id', - 'the_client_secret' - ) - - @patch.object(edx_api.EcommerceApi, 'retire_learner') - def test_retirement_partner_report(self, mock_method): - json_data = { - 'username': FAKE_ORIGINAL_USERNAME, - } - responses.add( - POST, - urljoin(self.lms_base_url, 'api/v2/user/retire/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.ecommerce_api.retire_learner( - learner=get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME) - ) - mock_method.assert_called_once_with( - learner=get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME) - ) - - @patch.object(edx_api.EcommerceApi, 'retire_learner') - def get_tracking_key(self, mock_method): - original_username = { - 'original_username': get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME) - } - responses.add( - GET, - urljoin(self.lms_base_url, f"api/v2/retirement/tracking_id/{original_username}/"), - ) - self.ecommerce_api.get_tracking_key( - learner=get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME) - ) - mock_method.assert_called_once_with( - learner=get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME) - ) - - @patch.object(edx_api.EcommerceApi, 'replace_usernames') - def test_replace_usernames(self, mock_method): - json_data = { - "username_mappings": FAKE_USERNAME_MAPPING - } - responses.add( - POST, - urljoin(self.lms_base_url, 'api/v2/user_management/replace_usernames/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.ecommerce_api.replace_usernames( - username_mappings=FAKE_USERNAME_MAPPING - ) - mock_method.assert_called_once_with( - username_mappings=FAKE_USERNAME_MAPPING - ) - - -class TestCredentialApi(OAuth2Mixin, unittest.TestCase): - """ - Test the edX Credential API client. 
- """ - - @responses.activate(registry=OrderedRegistry) - def setUp(self): - super().setUp() - self.mock_access_token_response() - self.lms_base_url = 'http://localhost:18000/' - self.credentials_base_url = 'http://localhost:18150/' - self.credentials_api = edx_api.CredentialsApi( - self.lms_base_url, - self.credentials_base_url, - 'the_client_id', - 'the_client_secret' - ) - - @patch.object(edx_api.CredentialsApi, 'retire_learner') - def test_retire_learner(self, mock_method): - json_data = { - 'username': FAKE_ORIGINAL_USERNAME - } - responses.add( - POST, - urljoin(self.credentials_base_url, 'user/retire/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.credentials_api.retire_learner( - learner=get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME) - ) - mock_method.assert_called_once_with( - learner=get_fake_user_retirement(original_username=FAKE_ORIGINAL_USERNAME) - ) - - @patch.object(edx_api.CredentialsApi, 'replace_usernames') - def test_replace_usernames(self, mock_method): - json_data = { - "username_mappings": FAKE_USERNAME_MAPPING - } - responses.add( - POST, - urljoin(self.credentials_base_url, 'api/v2/replace_usernames/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.credentials_api.replace_usernames( - username_mappings=FAKE_USERNAME_MAPPING - ) - mock_method.assert_called_once_with( - username_mappings=FAKE_USERNAME_MAPPING - ) - - -class TestDiscoveryApi(OAuth2Mixin, unittest.TestCase): - """ - Test the edX Discovery API client. - """ - - @responses.activate(registry=OrderedRegistry) - def setUp(self): - super().setUp() - self.mock_access_token_response() - self.lms_base_url = 'http://localhost:18000/' - self.discovery_base_url = 'http://localhost:18150/' - self.discovery_api = edx_api.DiscoveryApi( - self.lms_base_url, - self.discovery_base_url, - 'the_client_id', - 'the_client_secret' - ) - - @patch.object(edx_api.DiscoveryApi, 'replace_usernames') - def test_replace_usernames(self, mock_method): - json_data = { - "username_mappings": FAKE_USERNAME_MAPPING - } - responses.add( - POST, - urljoin(self.discovery_base_url, 'api/v1/replace_usernames/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.discovery_api.replace_usernames( - username_mappings=FAKE_USERNAME_MAPPING - ) - mock_method.assert_called_once_with( - username_mappings=FAKE_USERNAME_MAPPING - ) - - -class TestDemographicsApi(OAuth2Mixin, unittest.TestCase): - """ - Test the edX Demographics API client. - """ - - @responses.activate(registry=OrderedRegistry) - def setUp(self): - super().setUp() - self.mock_access_token_response() - self.lms_base_url = 'http://localhost:18000/' - self.demographics_base_url = 'http://localhost:18360/' - self.demographics_api = edx_api.DemographicsApi( - self.lms_base_url, - self.demographics_base_url, - 'the_client_id', - 'the_client_secret' - ) - - @patch.object(edx_api.DemographicsApi, 'retire_learner') - def test_retire_learner(self, mock_method): - json_data = { - 'lms_user_id': get_fake_user_retirement()['user']['id'] - } - responses.add( - POST, - urljoin(self.demographics_base_url, 'demographics/api/v1/retire_demographics/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.demographics_api.retire_learner( - learner=get_fake_user_retirement() - ) - mock_method.assert_called_once_with( - learner=get_fake_user_retirement() - ) - - -class TestLicenseManagerApi(OAuth2Mixin, unittest.TestCase): - """ - Test the edX License Manager API client. 
- """ - - @responses.activate(registry=OrderedRegistry) - def setUp(self): - super().setUp() - self.mock_access_token_response() - self.lms_base_url = 'http://localhost:18000/' - self.license_manager_base_url = 'http://localhost:18170/' - self.license_manager_api = edx_api.LicenseManagerApi( - self.lms_base_url, - self.license_manager_base_url, - 'the_client_id', - 'the_client_secret' - ) - - @patch.object(edx_api.LicenseManagerApi, 'retire_learner') - def test_retire_learner(self, mock_method): - json_data = { - 'lms_user_id': get_fake_user_retirement()['user']['id'], - 'original_username': FAKE_ORIGINAL_USERNAME, - } - responses.add( - POST, - urljoin(self.license_manager_base_url, 'api/v1/retire_user/'), - match=[matchers.json_params_matcher(json_data)] - ) - self.license_manager_api.retire_learner( - learner=get_fake_user_retirement( - original_username=FAKE_ORIGINAL_USERNAME - ) - ) - mock_method.assert_called_once_with( - learner=get_fake_user_retirement( - original_username=FAKE_ORIGINAL_USERNAME - ) - ) diff --git a/tubular/tests/test_get_learners_to_retire.py b/tubular/tests/test_get_learners_to_retire.py deleted file mode 100644 index b44b17cd..00000000 --- a/tubular/tests/test_get_learners_to_retire.py +++ /dev/null @@ -1,160 +0,0 @@ -""" -Test the get_learners_to_retire.py script -""" - -import os -from mock import patch, DEFAULT - -from click.testing import CliRunner -from requests.exceptions import HTTPError - -from tubular.scripts.get_learners_to_retire import ( - get_learners_to_retire -) -from tubular.tests.retirement_helpers import fake_config_file, get_fake_user_retirement - - -def _call_script(expected_user_files, cool_off_days=1, output_dir='test', user_count_error_threshold=200, max_user_batch_size=201): - """ - Call the retired learner script with the given username and a generic, temporary config file. 
- Returns the CliRunner.invoke results - """ - runner = CliRunner() - with runner.isolated_filesystem(): - with open('test_config.yml', 'w') as f: - fake_config_file(f) - result = runner.invoke( - get_learners_to_retire, - args=[ - '--config_file', 'test_config.yml', - '--cool_off_days', cool_off_days, - '--output_dir', output_dir, - '--user_count_error_threshold', user_count_error_threshold, - '--max_user_batch_size', max_user_batch_size - ] - ) - print(result) - print(result.output) - - # This is the number of users in the mocked call, each should have a file if the number is - # greater than 0, otherwise a failure is expected and the output dir should not exist - if expected_user_files: - assert len(os.listdir(output_dir)) == expected_user_files - else: - assert not os.path.exists(output_dir) - return result - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - learners_to_retire=DEFAULT -) -def test_success(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners_to_retire = kwargs['learners_to_retire'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_learners_to_retire.return_value = [ - get_fake_user_retirement(original_username='test_user1'), - get_fake_user_retirement(original_username='test_user2'), - ] - - result = _call_script(2) - - # Called once per API we instantiate (LMS, ECommerce, Credentials) - assert mock_get_access_token.call_count == 1 - mock_get_learners_to_retire.assert_called_once() - - assert result.exit_code == 0 - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - learners_to_retire=DEFAULT -) -def test_lms_down(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners_to_retire = kwargs['learners_to_retire'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_learners_to_retire.side_effect = HTTPError - - result = _call_script(0) - - # Called once per API we instantiate (LMS, ECommerce, Credentials) - assert mock_get_access_token.call_count == 1 - mock_get_learners_to_retire.assert_called_once() - - assert result.exit_code == 1 - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - learners_to_retire=DEFAULT -) -def test_misconfigured(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners_to_retire = kwargs['learners_to_retire'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_learners_to_retire.side_effect = HTTPError - - result = _call_script(0) - - # Called once per API we instantiate (LMS, ECommerce, Credentials) - assert mock_get_access_token.call_count == 1 - mock_get_learners_to_retire.assert_called_once() - - assert result.exit_code == 1 - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - learners_to_retire=DEFAULT -) -def test_too_many_users(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners_to_retire = kwargs['learners_to_retire'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_learners_to_retire.return_value = [ - get_fake_user_retirement(original_username='test_user1'), - get_fake_user_retirement(original_username='test_user2'), - ] - - result = _call_script(0, user_count_error_threshold=1) - - # Called once per API we instantiate (LMS, ECommerce, Credentials) - assert mock_get_access_token.call_count == 1 - mock_get_learners_to_retire.assert_called_once() - 
- assert result.exit_code == -1 - assert 'Too many learners' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - learners_to_retire=DEFAULT -) -def test_users_limit(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners_to_retire = kwargs['learners_to_retire'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_learners_to_retire.return_value = [ - get_fake_user_retirement(original_username='test_user1'), - get_fake_user_retirement(original_username='test_user2'), - ] - - result = _call_script(1, user_count_error_threshold=200, max_user_batch_size=1) - - # Called once per API we instantiate (LMS, ECommerce, Credentials) - assert mock_get_access_token.call_count == 1 - mock_get_learners_to_retire.assert_called_once() - - assert result.exit_code == 0 \ No newline at end of file diff --git a/tubular/tests/test_hubspot.py b/tubular/tests/test_hubspot.py deleted file mode 100644 index da9b07c2..00000000 --- a/tubular/tests/test_hubspot.py +++ /dev/null @@ -1,159 +0,0 @@ -""" -Tests for the Sailthru API functionality -""" -import os -import logging -import unittest - -from unittest import mock -import requests_mock -from six.moves import reload_module - -# This module is imported separately solely so it can be re-loaded below. -from tubular import hubspot_api - -# This HubspotAPI class will be used without being re-loaded. -from tubular.hubspot_api import HubspotAPI - -# Change the number of retries for Hubspot API's delete_user call to 1. -# Then reload hubspot_api so only a single retry is performed. -os.environ['RETRY_HUBSPOT_MAX_ATTEMPTS'] = "1" -reload_module(hubspot_api) # pylint: disable=too-many-function-args - - -@requests_mock.Mocker() -@mock.patch.object(HubspotAPI, 'send_marketing_alert') -class TestHubspot(unittest.TestCase): - """ - Class containing tests of all code interacting with Hubspot. 
- """ - def setUp(self): - super(TestHubspot, self).setUp() - self.test_learner = {'original_email': 'foo@bar.com'} - self.api_key = 'example_key' - self.test_vid = 12345 - self.test_region = 'test-east-1' - self.from_address = 'no-reply@example.com' - self.alert_email = 'marketing@example.com' - - def _mock_get_vid(self, req_mock, status_code): - req_mock.get( - hubspot_api.GET_VID_FROM_EMAIL_URL_TEMPLATE.format( - email=self.test_learner['original_email'] - ), - json={'vid': self.test_vid}, - status_code=status_code - ) - - def _mock_delete(self, req_mock, status_code): - req_mock.delete( - hubspot_api.DELETE_USER_FROM_VID_TEMPLATE.format( - vid=self.test_vid - ), - json={}, - status_code=status_code - ) - - def test_delete_no_email(self, req_mock, mock_alert): # pylint: disable=unused-argument - with self.assertRaises(TypeError) as exc: - HubspotAPI( - self.api_key, - self.test_region, - self.from_address, - self.alert_email - ).delete_user({}) - self.assertIn('Expected an email address for user to delete, but received None.', str(exc)) - mock_alert.assert_not_called() - - def test_delete_success(self, req_mock, mock_alert): - self._mock_get_vid(req_mock, 200) - self._mock_delete(req_mock, 200) - logger = logging.getLogger('tubular.hubspot_api') - - with mock.patch.object(logger, 'info') as mock_info: - HubspotAPI( - self.api_key, - self.test_region, - self.from_address, - self.alert_email - ).delete_user(self.test_learner) - mock_info.assert_called_once_with("User successfully deleted from Hubspot") - mock_alert.assert_called_once_with(12345) - - def test_delete_email_does_not_exist(self, req_mock, mock_alert): - self._mock_get_vid(req_mock, 404) - logger = logging.getLogger('tubular.hubspot_api') - with mock.patch.object(logger, 'info') as mock_info: - HubspotAPI( - self.api_key, - self.test_region, - self.from_address, - self.alert_email - ).delete_user(self.test_learner) - mock_info.assert_called_once_with("No action taken because no user was found in Hubspot.") - mock_alert.assert_not_called() - - def test_delete_server_failure_on_user_retrieval(self, req_mock, mock_alert): - self._mock_get_vid(req_mock, 500) - with self.assertRaises(hubspot_api.HubspotException) as exc: - HubspotAPI( - self.api_key, - self.test_region, - self.from_address, - self.alert_email - ).delete_user(self.test_learner) - self.assertIn("Error attempted to get user_vid from Hubspot", str(exc)) - mock_alert.assert_not_called() - - def test_delete_unauthorized_deletion(self, req_mock, mock_alert): - self._mock_get_vid(req_mock, 200) - self._mock_delete(req_mock, 401) - with self.assertRaises(hubspot_api.HubspotException) as exc: - HubspotAPI( - self.api_key, - self.test_region, - self.from_address, - self.alert_email - ).delete_user(self.test_learner) - self.assertIn("Hubspot user deletion failed due to authorized API call", str(exc)) - mock_alert.assert_not_called() - - def test_delete_vid_not_found(self, req_mock, mock_alert): - self._mock_get_vid(req_mock, 200) - self._mock_delete(req_mock, 404) - with self.assertRaises(hubspot_api.HubspotException) as exc: - HubspotAPI( - self.api_key, - self.test_region, - self.from_address, - self.alert_email - ).delete_user(self.test_learner) - self.assertIn("Hubspot user deletion failed because vid doesn't match user", str(exc)) - mock_alert.assert_not_called() - - def test_delete_server_failure_on_deletion(self, req_mock, mock_alert): - self._mock_get_vid(req_mock, 200) - self._mock_delete(req_mock, 500) - with self.assertRaises(hubspot_api.HubspotException) as exc: - 
HubspotAPI( - self.api_key, - self.test_region, - self.from_address, - self.alert_email - ).delete_user(self.test_learner) - self.assertIn("Hubspot user deletion failed due to server-side (Hubspot) issues", str(exc)) - mock_alert.assert_not_called() - - def test_delete_catch_all_on_deletion(self, req_mock, mock_alert): - self._mock_get_vid(req_mock, 200) - # Testing 403 as it's not a response type per the Hubspot API docs, so it doesn't have it's own error. - self._mock_delete(req_mock, 403) - with self.assertRaises(hubspot_api.HubspotException) as exc: - HubspotAPI( - self.api_key, - self.test_region, - self.from_address, - self.alert_email - ).delete_user(self.test_learner) - self.assertIn("Hubspot user deletion failed due to unknown reasons", str(exc)) - mock_alert.assert_not_called() diff --git a/tubular/tests/test_jenkins.py b/tubular/tests/test_jenkins.py deleted file mode 100644 index c1e7d836..00000000 --- a/tubular/tests/test_jenkins.py +++ /dev/null @@ -1,193 +0,0 @@ -""" -Tests for triggering a Jenkins job. -""" - -from itertools import islice -import json -import re -import unittest - -import backoff -import ddt -from mock import Mock, mock_open, patch, call -import requests_mock - -from tubular.exception import BackendError -import tubular.jenkins as jenkins - -BASE_URL = u'https://test-jenkins' -USER_ID = u'foo' -USER_TOKEN = u'12345678901234567890123456789012' -JOB = u'test-job' -TOKEN = u'asdf' -BUILD_NUM = 456 -JOBS_URL = u'{}/job/{}/'.format(BASE_URL, JOB) -JOB_URL = u'{}{}'.format(JOBS_URL, BUILD_NUM) -MOCK_BUILD = {u'number': BUILD_NUM, u'url': JOB_URL} -MOCK_JENKINS_DATA = {'jobs': [{'name': JOB, 'url': JOBS_URL, 'color': 'blue'}]} -MOCK_BUILDS_DATA = { - 'actions': [ - {'parameterDefinitions': [ - {'defaultParameterValue': {'value': '0'}, 'name': 'EXIT_CODE', 'type': 'StringParameterDefinition'} - ]} - ], - 'builds': [MOCK_BUILD], - 'lastBuild': MOCK_BUILD -} -MOCK_QUEUE_DATA = { - 'id': 123, - 'task': {'name': JOB, 'url': JOBS_URL}, - 'executable': {'number': BUILD_NUM, 'url': JOB_URL} -} -MOCK_BUILD_DATA = { - 'actions': [{}], - 'fullDisplayName': 'foo', - 'number': BUILD_NUM, - 'result': 'SUCCESS', - 'url': JOB_URL, -} -MOCK_CRUMB_DATA = { - 'crumbRequestField': 'Jenkins-Crumb', - 'crumb': '1234567890' -} - - -class TestProperties(unittest.TestCase): - """ - Test the Jenkins property-creating methods. 
- """ - - def test_properties_files(self): - learners = [ - { - 'original_username': 'learnerA' - }, - { - 'original_username': 'learnerB' - }, - ] - open_mocker = mock_open() - with patch('tubular.jenkins.open', open_mocker, create=True): - jenkins._recreate_directory = Mock() # pylint: disable=protected-access - jenkins.export_learner_job_properties(learners, "tmpdir") - jenkins._recreate_directory.assert_called_once() # pylint: disable=protected-access - self.assertIn(call('tmpdir/learner_retire_learnera', 'w'), open_mocker.call_args_list) - self.assertIn(call('tmpdir/learner_retire_learnerb', 'w'), open_mocker.call_args_list) - handle = open_mocker() - self.assertIn(call('RETIREMENT_USERNAME=learnerA\n'), handle.write.call_args_list) - self.assertIn(call('RETIREMENT_USERNAME=learnerB\n'), handle.write.call_args_list) - - -@ddt.ddt -class TestBackoff(unittest.TestCase): - u""" - Test of custom backoff code (wait time generator and max_tries) - """ - - @ddt.data( - (2, 1, 1, 2, [1]), - (2, 1, 2, 3, [1, 1]), - (2, 1, 3, 3, [1, 2]), - (2, 100, 90, 2, [90]), - (2, 1, 90, 8, [1, 2, 4, 8, 16, 32, 27]), - (3, 5, 1000, 7, [5, 15, 45, 135, 405, 395]), - (2, 1, 3600, 13, [1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 1553]), - ) - @ddt.unpack - def test_max_timeout(self, base, factor, timeout, expected_max_tries, expected_waits): - # pylint: disable=protected-access - wait_gen, max_tries = jenkins._backoff_timeout(timeout, base, factor) - self.assertEqual(expected_max_tries, max_tries) - - # Use max_tries-1, because we only wait that many times - waits = list(islice(wait_gen(), max_tries - 1)) - self.assertEqual(expected_waits, waits) - - self.assertEqual(timeout, sum(waits)) - - def test_backoff_call(self): - # pylint: disable=protected-access - wait_gen, max_tries = jenkins._backoff_timeout(timeout=.36, base=2, factor=.0001) - always_false = Mock(return_value=False) - - count_retries = backoff.on_predicate( - wait_gen, - max_tries=max_tries, - on_backoff=print, - jitter=None, - )(always_false.__call__) - - count_retries() - - self.assertEqual(always_false.call_count, 13) - - -@ddt.ddt -class TestJenkinsAPI(unittest.TestCase): - """ - Tests for interacting with the Jenkins API - """ - - @requests_mock.Mocker() - def test_failure(self, mock): - """ - Test the failure condition when triggering a jenkins job - """ - # Mock all network interactions - mock.get( - re.compile(".*"), - status_code=404, - ) - with self.assertRaises(BackendError): - jenkins.trigger_build(BASE_URL, USER_ID, USER_TOKEN, JOB, TOKEN, None, ()) - - @ddt.data( - (None, ()), - ('my cause', ()), - (None, ((u'FOO', u'bar'),)), - (None, ((u'FOO', u'bar'), (u'BAZ', u'biz'))), - ('my cause', ((u'FOO', u'bar'),)), - ) - @ddt.unpack - @requests_mock.Mocker() - def test_success(self, cause, param, mock): - u""" - Test triggering a jenkins job - """ - - def text_callback(request, context): - u""" What to return from the mock. 
""" - # This is the initial call that jenkinsapi uses to - # establish connectivity to Jenkins - # https://test-jenkins/api/python?tree=jobs[name,color,url] - context.status_code = 200 - if request.url.startswith(u'https://test-jenkins/api/python'): - return json.dumps(MOCK_JENKINS_DATA) - elif request.url.startswith(u'https://test-jenkins/job/test-job/456'): - return json.dumps(MOCK_BUILD_DATA) - elif request.url.startswith(u'https://test-jenkins/job/test-job'): - return json.dumps(MOCK_BUILDS_DATA) - elif request.url.startswith(u'https://test-jenkins/queue/item/123/api/python'): - return json.dumps(MOCK_QUEUE_DATA) - elif request.url.startswith(u'https://test-jenkins/crumbIssuer/api/python'): - return json.dumps(MOCK_CRUMB_DATA) - else: - # We should never get here, unless the jenkinsapi implementation changes. - # This response will catch that condition. - context.status_code = 500 - return None - - # Mock all network interactions - mock.get( - re.compile('.*'), - text=text_callback - ) - mock.post( - '{}/job/test-job/buildWithParameters'.format(BASE_URL), - status_code=201, # Jenkins responds with a 201 Created on success - headers={'location': '{}/queue/item/123'.format(BASE_URL)} - ) - - # Make the call to the Jenkins API - result = jenkins.trigger_build(BASE_URL, USER_ID, USER_TOKEN, JOB, TOKEN, cause, param) - self.assertEqual(result, 'SUCCESS') diff --git a/tubular/tests/test_retire_one_learner.py b/tubular/tests/test_retire_one_learner.py deleted file mode 100644 index 7924a5d6..00000000 --- a/tubular/tests/test_retire_one_learner.py +++ /dev/null @@ -1,415 +0,0 @@ -""" -Test the retire_one_learner.py script -""" - -from click.testing import CliRunner -from mock import DEFAULT, patch - -from tubular.exception import HttpDoesNotExistException -from tubular.scripts.retire_one_learner import ( - END_STATES, - ERR_BAD_CONFIG, - ERR_BAD_LEARNER, - ERR_SETUP_FAILED, - ERR_UNKNOWN_STATE, - ERR_USER_AT_END_STATE, - ERR_USER_IN_WORKING_STATE, - retire_learner -) -from tubular.tests.retirement_helpers import ( - fake_config_file, - get_fake_user_retirement -) - - -def _call_script(username, fetch_ecom_segment_id=False): - """ - Call the retired learner script with the given username and a generic, temporary config file. 
- Returns the CliRunner.invoke results - """ - runner = CliRunner() - with runner.isolated_filesystem(): - with open('test_config.yml', 'w') as f: - fake_config_file(f, fetch_ecom_segment_id=fetch_ecom_segment_id) - result = runner.invoke(retire_learner, args=['--username', username, '--config_file', 'test_config.yml']) - print(result) - print(result.output) - return result - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch('tubular.edx_api.EcommerceApi.get_tracking_key') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT, - retirement_retire_forum=DEFAULT, - retirement_retire_mailings=DEFAULT, - retirement_unenroll=DEFAULT, - retirement_lms_retire=DEFAULT -) -def test_successful_retirement(*args, **kwargs): - username = 'test_username' - - mock_get_access_token = args[1] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - mock_retire_forum = kwargs['retirement_retire_forum'] - mock_retire_mailings = kwargs['retirement_retire_mailings'] - mock_unenroll = kwargs['retirement_unenroll'] - mock_lms_retire = kwargs['retirement_lms_retire'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_retirement_state.return_value = get_fake_user_retirement(original_username=username) - - result = _call_script(username, fetch_ecom_segment_id=True) - - # Called once per API we instantiate (LMS, ECommerce, Credentials) - assert mock_get_access_token.call_count == 3 - mock_get_retirement_state.assert_called_once_with(username) - assert mock_update_learner_state.call_count == 9 - - # Called once per retirement - for mock_call in ( - mock_retire_forum, - mock_retire_mailings, - mock_unenroll, - mock_lms_retire - ): - mock_call.assert_called_once_with(mock_get_retirement_state.return_value) - - assert result.exit_code == 0 - assert 'Retirement complete' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT -) -def test_user_does_not_exist(*args, **kwargs): - username = 'test_username' - - mock_get_access_token = args[0] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_retirement_state.side_effect = Exception - - result = _call_script(username) - - assert mock_get_access_token.call_count == 3 - mock_get_retirement_state.assert_called_once_with(username) - mock_update_learner_state.assert_not_called() - - assert result.exit_code == ERR_SETUP_FAILED - assert 'Exception' in result.output - - -def test_bad_config(): - username = 'test_username' - runner = CliRunner() - result = runner.invoke(retire_learner, args=['--username', username, '--config_file', 'does_not_exist.yml']) - assert result.exit_code == ERR_BAD_CONFIG - assert 'does_not_exist.yml' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT -) -def test_bad_learner(*args, **kwargs): - username = 'test_username' - - mock_get_access_token = args[0] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_update_learner_state = 
kwargs['update_learner_retirement_state'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - - # Broken API call, no state returned - mock_get_retirement_state.side_effect = HttpDoesNotExistException - result = _call_script(username) - - assert mock_get_access_token.call_count == 3 - mock_get_retirement_state.assert_called_once_with(username) - mock_update_learner_state.assert_not_called() - - assert result.exit_code == ERR_BAD_LEARNER - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT -) -def test_user_in_working_state(*args, **kwargs): - username = 'test_username' - - mock_get_access_token = args[0] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_retirement_state.return_value = get_fake_user_retirement( - original_username=username, - current_state_name='RETIRING_FORUMS' - ) - - result = _call_script(username) - - assert mock_get_access_token.call_count == 3 - mock_get_retirement_state.assert_called_once_with(username) - mock_update_learner_state.assert_not_called() - - assert result.exit_code == ERR_USER_IN_WORKING_STATE - assert 'in a working state' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT -) -def test_user_in_bad_state(*args, **kwargs): - username = 'test_username' - bad_state = 'BOGUS_STATE' - mock_get_access_token = args[0] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_retirement_state.return_value = get_fake_user_retirement( - original_username=username, - current_state_name=bad_state - ) - result = _call_script(username) - - assert mock_get_access_token.call_count == 3 - mock_get_retirement_state.assert_called_once_with(username) - mock_update_learner_state.assert_not_called() - - assert result.exit_code == ERR_UNKNOWN_STATE - assert bad_state in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT -) -def test_user_in_end_state(*args, **kwargs): - username = 'test_username' - - mock_get_access_token = args[0] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - - # pytest.parameterize doesn't play nicely with patch.multiple, this seemed more - # readable than the alternatives. 
- for end_state in END_STATES: - mock_get_retirement_state.return_value = { - 'original_username': username, - 'current_state': { - 'state_name': end_state - } - } - - result = _call_script(username) - - assert mock_get_access_token.call_count == 3 - mock_get_retirement_state.assert_called_once_with(username) - mock_update_learner_state.assert_not_called() - - assert result.exit_code == ERR_USER_AT_END_STATE - assert end_state in result.output - - # Reset our call counts for the next test - mock_get_access_token.reset_mock() - mock_get_retirement_state.reset_mock() - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT, - retirement_retire_forum=DEFAULT, - retirement_retire_mailings=DEFAULT, - retirement_unenroll=DEFAULT, - retirement_lms_retire=DEFAULT -) -def test_skipping_states(*args, **kwargs): - username = 'test_username' - - mock_get_access_token = args[0] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - mock_retire_forum = kwargs['retirement_retire_forum'] - mock_retire_mailings = kwargs['retirement_retire_mailings'] - mock_unenroll = kwargs['retirement_unenroll'] - mock_lms_retire = kwargs['retirement_lms_retire'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_retirement_state.return_value = get_fake_user_retirement( - original_username=username, - current_state_name='EMAIL_LISTS_COMPLETE' - ) - - result = _call_script(username) - - # Called once per API we instantiate (LMS, ECommerce, Credentials) - assert mock_get_access_token.call_count == 3 - mock_get_retirement_state.assert_called_once_with(username) - assert mock_update_learner_state.call_count == 5 - - # Skipped - for mock_call in ( - mock_retire_forum, - mock_retire_mailings - ): - mock_call.assert_not_called() - - # Called once per retirement - for mock_call in ( - mock_unenroll, - mock_lms_retire - ): - mock_call.assert_called_once_with(mock_get_retirement_state.return_value) - - assert result.exit_code == 0 - - for required_output in ( - 'RETIRING_FORUMS completed in previous run', - 'RETIRING_EMAIL_LISTS completed in previous run', - 'Starting state RETIRING_ENROLLMENTS', - 'State RETIRING_ENROLLMENTS completed', - 'Starting state RETIRING_LMS', - 'State RETIRING_LMS completed', - 'Retirement complete' - ): - assert required_output in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch('tubular.edx_api.EcommerceApi.get_tracking_key') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT, - retirement_retire_forum=DEFAULT, - retirement_retire_mailings=DEFAULT, - retirement_unenroll=DEFAULT, - retirement_lms_retire=DEFAULT -) -def test_get_segment_id_success(*args, **kwargs): - username = 'test_username' - - mock_get_tracking_key = args[0] - mock_get_access_token = args[1] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_retirement_retire_forum = kwargs['retirement_retire_forum'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_tracking_key.return_value = {'id': 1, 'ecommerce_tracking_id': 'ecommerce-1'} - - # The learner starts off with these values, 'ecommerce_segment_id' is added during script - # startup - mock_get_retirement_state.return_value = get_fake_user_retirement( - original_username=username, - ) - 
- _call_script(username, fetch_ecom_segment_id=True) - mock_get_tracking_key.assert_called_once_with(mock_get_retirement_state.return_value) - - config_after_get_segment_id = mock_get_retirement_state.return_value - config_after_get_segment_id['ecommerce_segment_id'] = 'ecommerce-1' - - mock_retirement_retire_forum.assert_called_once_with(config_after_get_segment_id) - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch('tubular.edx_api.EcommerceApi.get_tracking_key') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT, - retirement_retire_forum=DEFAULT, - retirement_retire_mailings=DEFAULT, - retirement_unenroll=DEFAULT, - retirement_lms_retire=DEFAULT -) -def test_get_segment_id_not_found(*args, **kwargs): - username = 'test_username' - - mock_get_tracking_key = args[0] - mock_get_access_token = args[1] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_get_tracking_key.side_effect = HttpDoesNotExistException('{} not found'.format(username)) - - mock_get_retirement_state.return_value = get_fake_user_retirement( - original_username=username, - ) - - result = _call_script(username, fetch_ecom_segment_id=True) - mock_get_tracking_key.assert_called_once_with(mock_get_retirement_state.return_value) - assert 'Setting Ecommerce Segment ID to None' in result.output - - # Reset our call counts for the next test - mock_get_access_token.reset_mock() - mock_get_retirement_state.reset_mock() - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch('tubular.edx_api.EcommerceApi.get_tracking_key') -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learner_retirement_state=DEFAULT, - update_learner_retirement_state=DEFAULT, - retirement_retire_forum=DEFAULT, - retirement_retire_mailings=DEFAULT, - retirement_unenroll=DEFAULT, - retirement_lms_retire=DEFAULT -) -def test_get_segment_id_error(*args, **kwargs): - username = 'test_username' - - mock_get_tracking_key = args[0] - mock_get_access_token = args[1] - mock_get_retirement_state = kwargs['get_learner_retirement_state'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - - test_exception_message = 'Test Exception!' - mock_get_tracking_key.side_effect = Exception(test_exception_message) - - mock_get_retirement_state.return_value = get_fake_user_retirement( - original_username=username, - ) - - mock_get_retirement_state.return_value = { - 'original_username': username, - 'current_state': { - 'state_name': 'PENDING' - } - } - - result = _call_script(username, fetch_ecom_segment_id=True) - mock_get_tracking_key.assert_called_once_with(mock_get_retirement_state.return_value) - mock_update_learner_state.assert_not_called() - - assert result.exit_code == ERR_SETUP_FAILED - assert 'Unexpected error fetching Ecommerce tracking id!' 
in result.output - assert test_exception_message in result.output diff --git a/tubular/tests/test_retirement_archive_and_cleanup.py b/tubular/tests/test_retirement_archive_and_cleanup.py deleted file mode 100644 index f8b56307..00000000 --- a/tubular/tests/test_retirement_archive_and_cleanup.py +++ /dev/null @@ -1,271 +0,0 @@ -""" -Test the retirement_archive_and_cleanup.py script -""" - - -import datetime -import os -import unittest.mock as mock - -import boto3 -import pytest -from botocore.exceptions import ClientError -from click.testing import CliRunner -from mock import DEFAULT, call, patch -from moto import mock_ec2, mock_s3 - -from tubular.scripts.retirement_archive_and_cleanup import ( - ERR_ARCHIVING, ERR_BAD_CLI_PARAM, ERR_BAD_CONFIG, ERR_DELETING, - ERR_FETCHING, ERR_NO_CONFIG, ERR_SETUP_FAILED, _upload_to_s3, - archive_and_cleanup) -from tubular.tests.retirement_helpers import (fake_config_file, - get_fake_user_retirement) - -FAKE_BUCKET_NAME = "fake_test_bucket" - - -def _call_script(cool_off_days=37, batch_size=None, dry_run=None, start_date=None, end_date=None): - """ - Call the archive script with the given params and a generic config file. - Returns the CliRunner.invoke results - """ - runner = CliRunner() - with runner.isolated_filesystem(): - with open('test_config.yml', 'w') as f: - fake_config_file(f) - - base_args = [ - '--config_file', 'test_config.yml', - '--cool_off_days', cool_off_days, - ] - if batch_size: - base_args += ['--batch_size', batch_size] - if dry_run: - base_args += ['--dry_run', dry_run] - if start_date: - base_args += ['--start_date', start_date] - if end_date: - base_args += ['--end_date', end_date] - - result = runner.invoke(archive_and_cleanup, args=base_args) - print(result) - print(result.output) - return result - - -def _fake_learner(ordinal): - """ - Creates a simple fake learner - """ - return get_fake_user_retirement( - user_id=ordinal, - original_username='test{}'.format(ordinal), - original_email='test{}@edx.invalid'.format(ordinal), - original_name='test {}'.format(ordinal), - retired_username='retired_{}'.format(ordinal), - retired_email='retired_test{}@edx.invalid'.format(ordinal), - last_state_name='COMPLETE' - ) - - -def fake_learners_to_retire(): - """ - A simple hard-coded list of fake learners - """ - return [ - _fake_learner(1), - _fake_learner(2), - _fake_learner(3) - ] - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learners_by_date_and_status=DEFAULT, - bulk_cleanup_retirements=DEFAULT -) -@mock_s3 -def test_successful(*args, **kwargs): - conn = boto3.resource('s3') - conn.create_bucket(Bucket=FAKE_BUCKET_NAME) - - mock_get_access_token = args[0] - mock_get_learners = kwargs['get_learners_by_date_and_status'] - mock_bulk_cleanup_retirements = kwargs['bulk_cleanup_retirements'] - - mock_get_learners.return_value = fake_learners_to_retire() - - result = _call_script() - - # Called once to get the LMS token - assert mock_get_access_token.call_count == 1 - mock_get_learners.assert_called_once() - mock_bulk_cleanup_retirements.assert_called_once_with( - ['test1', 'test2', 'test3']) - - assert result.exit_code == 0 - assert 'Archive and cleanup complete' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learners_by_date_and_status=DEFAULT, - bulk_cleanup_retirements=DEFAULT -) -@mock_ec2 -@mock_s3 -def 
test_successful_with_batching(*args, **kwargs): - conn = boto3.resource('s3') - conn.create_bucket(Bucket=FAKE_BUCKET_NAME) - - mock_get_access_token = args[0] - mock_get_learners = kwargs['get_learners_by_date_and_status'] - mock_bulk_cleanup_retirements = kwargs['bulk_cleanup_retirements'] - - mock_get_learners.return_value = fake_learners_to_retire() - - result = _call_script(batch_size=2) - - # Called once to get the LMS token - assert mock_get_access_token.call_count == 1 - mock_get_learners.assert_called_once() - get_learner_calls = [call(['test1', 'test2']), call(['test3'])] - mock_bulk_cleanup_retirements.assert_has_calls(get_learner_calls) - - assert result.exit_code == 0 - assert 'Archive and cleanup complete for batch #1' in result.output - assert 'Archive and cleanup complete for batch #2' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learners_by_date_and_status=DEFAULT, - bulk_cleanup_retirements=DEFAULT -) -@mock_s3 -def test_successful_dry_run(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners = kwargs['get_learners_by_date_and_status'] - mock_bulk_cleanup_retirements = kwargs['bulk_cleanup_retirements'] - - mock_get_learners.return_value = fake_learners_to_retire() - - result = _call_script(dry_run=True) - - # Called once to get the LMS token - assert mock_get_access_token.call_count == 1 - mock_get_learners.assert_called_once() - mock_bulk_cleanup_retirements.assert_not_called() - - assert result.exit_code == 0 - assert 'Dry run. Skipping the step to upload data to' in result.output - assert 'This is a dry-run. Exiting before any retirements are cleaned up' in result.output - - -def test_no_config(): - runner = CliRunner() - result = runner.invoke( - archive_and_cleanup, - args=[ - '--cool_off_days', 37 - ] - ) - assert result.exit_code == ERR_NO_CONFIG - assert 'No config file passed in.' in result.output - - -def test_bad_config(): - runner = CliRunner() - result = runner.invoke( - archive_and_cleanup, - args=[ - '--config_file', 'does_not_exist.yml', - '--cool_off_days', 37 - ] - ) - assert result.exit_code == ERR_BAD_CONFIG - assert 'does_not_exist.yml' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch('tubular.edx_api.LmsApi.__init__', side_effect=Exception) -def test_setup_failed(*_): - result = _call_script() - assert result.exit_code == ERR_SETUP_FAILED - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch('tubular.edx_api.LmsApi.get_learners_by_date_and_status', side_effect=Exception) -def test_bad_fetch(*_): - result = _call_script() - assert result.exit_code == ERR_FETCHING - assert 'Unexpected error occurred fetching users to update!' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch('tubular.edx_api.LmsApi.get_learners_by_date_and_status', return_value=fake_learners_to_retire()) -@patch('tubular.edx_api.LmsApi.bulk_cleanup_retirements', side_effect=Exception) -@patch('tubular.scripts.retirement_archive_and_cleanup._upload_to_s3') -def test_bad_lms_deletion(*_): - result = _call_script() - assert result.exit_code == ERR_DELETING - assert 'Unexpected error occurred deleting retirements!' 
in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch('tubular.edx_api.LmsApi.get_learners_by_date_and_status', return_value=fake_learners_to_retire()) -@patch('tubular.edx_api.LmsApi.bulk_cleanup_retirements') -@patch('tubular.scripts.retirement_archive_and_cleanup._upload_to_s3', side_effect=Exception) -def test_bad_s3_upload(*_): - result = _call_script() - assert result.exit_code == ERR_ARCHIVING - assert 'Unexpected error occurred archiving retirements!' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -def test_conflicting_dates(*_): - result = _call_script(start_date=datetime.datetime( - 2021, 10, 10), end_date=datetime.datetime(2018, 10, 10)) - assert result.exit_code == ERR_BAD_CLI_PARAM - assert 'Conflicting start and end dates passed on CLI' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch( - 'tubular.scripts.retirement_archive_and_cleanup._get_utc_now', - return_value=datetime.datetime(2021, 2, 2, 0, 0) -) -def test_conflicting_cool_off_date(*_): - result = _call_script( - cool_off_days=10, - start_date=datetime.datetime(2021, 1, 1), end_date=datetime.datetime(2021, 2, 1) - ) - assert result.exit_code == ERR_BAD_CLI_PARAM - assert 'End date cannot occur within the cool_off_days period' in result.output - - -@mock_s3 -def test_s3_upload_data(): - """ - Test case to verify s3 upload and download. - """ - s3 = boto3.client("s3") - s3.create_bucket(Bucket=FAKE_BUCKET_NAME) - config = {'s3_archive': {'bucket_name': FAKE_BUCKET_NAME}} - filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_data', 'uploading.txt') - key = 'raw/' + datetime.datetime.now().strftime('%Y/%m/') + filename - - # first try dry run without uploading. Try to get object should raise error - with pytest.raises(ClientError) as exc_info: - _upload_to_s3(config, filename, True) - s3.get_object(Bucket=FAKE_BUCKET_NAME, Key=key) - assert exc_info.value.response['Error']['Code'] == 'NoSuchKey' - - # upload a file, download and compare its content. - _upload_to_s3(config, filename, False) - resp = s3.get_object(Bucket=FAKE_BUCKET_NAME, Key=key) - data = resp["Body"].read() - assert data.decode() == "Upload this file on s3 in tests." diff --git a/tubular/tests/test_retirement_bulk_status_update.py b/tubular/tests/test_retirement_bulk_status_update.py deleted file mode 100644 index fd917ea4..00000000 --- a/tubular/tests/test_retirement_bulk_status_update.py +++ /dev/null @@ -1,180 +0,0 @@ -""" -Test the retirement_bulk_status_update.py script -""" - - -from mock import patch, DEFAULT - -from click.testing import CliRunner - -from tubular.scripts.retirement_bulk_status_update import ( - ERR_BAD_CONFIG, - ERR_FETCHING, - ERR_NO_CONFIG, - ERR_SETUP_FAILED, - ERR_UPDATING, - update_statuses -) -from tubular.tests.retirement_helpers import fake_config_file, get_fake_user_retirement - - -def _call_script(initial_state='COMPLETE', new_state='PENDING', start_date='2018-01-01', end_date='2018-01-15', rewind_state=False): - """ - Call the bulk update statuses script with the given params and a generic config file. 
- Returns the CliRunner.invoke results - """ - runner = CliRunner() - with runner.isolated_filesystem(): - with open('test_config.yml', 'w') as f: - fake_config_file(f) - args = [ - '--config_file', 'test_config.yml', - '--initial_state', initial_state, - '--start_date', start_date, - '--end_date', end_date - ] - args.extend(['--new_state', new_state]) if new_state else None - args.append('--rewind-state') if rewind_state else None - result = runner.invoke( - update_statuses, - args=args - ) - print(result) - print(result.output) - return result - - -def fake_learners_to_retire(**overrides): - """ - A simple hard-coded list of fake learners with the only piece of - information this script cares about. - """ - - return [ - get_fake_user_retirement(**{"original_username": "user1", **overrides}), - get_fake_user_retirement(**{"original_username": "user2", **overrides}), - get_fake_user_retirement(**{"original_username": "user3", **overrides}), - ] - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learners_by_date_and_status=DEFAULT, - update_learner_retirement_state=DEFAULT -) -def test_successful_update(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners = kwargs['get_learners_by_date_and_status'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - - mock_get_learners.return_value = fake_learners_to_retire() - - result = _call_script() - - # Called once to get the LMS token - assert mock_get_access_token.call_count == 1 - mock_get_learners.assert_called_once() - assert mock_update_learner_state.call_count == 3 - - assert result.exit_code == 0 - assert 'Bulk update complete' in result.output - - -def test_no_config(): - runner = CliRunner() - result = runner.invoke( - update_statuses, - args=[ - '--initial_state', 'COMPLETE', - '--new_state', 'PENDING', - '--start_date', '2018-01-01', - '--end_date', '2018-01-15' - ] - ) - assert result.exit_code == ERR_NO_CONFIG - assert 'No config file passed in.' 
in result.output - - -def test_bad_config(): - runner = CliRunner() - result = runner.invoke( - update_statuses, - args=[ - '--config_file', 'does_not_exist.yml', - '--initial_state', 'COMPLETE', - '--new_state', 'PENDING', - '--start_date', '2018-01-01', - '--end_date', '2018-01-15' - ] - ) - assert result.exit_code == ERR_BAD_CONFIG - assert 'does_not_exist.yml' in result.output - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learners_by_date_and_status=DEFAULT, - update_learner_retirement_state=DEFAULT -) -def test_successful_rewind(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners = kwargs['get_learners_by_date_and_status'] - mock_update_learner_state = kwargs['update_learner_retirement_state'] - - mock_get_learners.return_value = fake_learners_to_retire(current_state_name='ERRORED') - - result = _call_script(new_state=None, rewind_state=True) - - # Called once to get the LMS token - assert mock_get_access_token.call_count == 1 - mock_get_learners.assert_called_once() - assert mock_update_learner_state.call_count == 3 - - assert result.exit_code == 0 - assert 'Bulk update complete' in result.output - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch.multiple( - 'tubular.edx_api.LmsApi', - get_learners_by_date_and_status=DEFAULT, - update_learner_retirement_state=DEFAULT -) -def test_rewind_bad_args(*args, **kwargs): - mock_get_access_token = args[0] - mock_get_learners = kwargs['get_learners_by_date_and_status'] - - mock_get_learners.return_value = fake_learners_to_retire(current_state_name='ERRORED') - - result = _call_script(rewind_state=True) - - # Called once to get the LMS token - assert mock_get_access_token.call_count == 1 - mock_get_learners.assert_called_once() - - assert result.exit_code == ERR_BAD_CONFIG - assert 'boolean rewind_state or a new state to set learners to' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch('tubular.edx_api.LmsApi.__init__', side_effect=Exception) -def test_setup_failed(*_): - result = _call_script() - assert result.exit_code == ERR_SETUP_FAILED - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch('tubular.edx_api.LmsApi.get_learners_by_date_and_status', side_effect=Exception) -def test_bad_fetch(*_): - result = _call_script() - assert result.exit_code == ERR_FETCHING - assert 'Unexpected error occurred fetching users to update!' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token', return_value=('THIS_IS_A_JWT', None)) -@patch('tubular.edx_api.LmsApi.get_learners_by_date_and_status', return_value=fake_learners_to_retire()) -@patch('tubular.edx_api.LmsApi.update_learner_retirement_state', side_effect=Exception) -def test_bad_update(*_): - result = _call_script() - assert result.exit_code == ERR_UPDATING - assert 'Unexpected error occurred updating users!' 
in result.output diff --git a/tubular/tests/test_retirement_partner_report.py b/tubular/tests/test_retirement_partner_report.py deleted file mode 100644 index d6c13741..00000000 --- a/tubular/tests/test_retirement_partner_report.py +++ /dev/null @@ -1,809 +0,0 @@ -# coding=utf-8 -""" -Test the retire_one_learner.py script -""" - - -import csv -import os -import unicodedata -from datetime import date -import time - -from click.testing import CliRunner -from mock import DEFAULT, patch -from six import PY2, itervalues - -from tubular.scripts.retirement_partner_report import ( - DEFAULT_FIELD_HEADINGS, - ERR_BAD_CONFIG, - ERR_BAD_SECRETS, - ERR_CLEANUP, - ERR_FETCHING_LEARNERS, - ERR_NO_CONFIG, - ERR_NO_SECRETS, - ERR_NO_OUTPUT_DIR, - ERR_REPORTING, - ERR_SETUP_FAILED, - ERR_UNKNOWN_ORG, - ERR_DRIVE_LISTING, - LEARNER_CREATED_KEY, - LEARNER_ORIGINAL_USERNAME_KEY, - ORGS_CONFIG_FIELD_HEADINGS_KEY, - ORGS_CONFIG_KEY, - ORGS_CONFIG_LEARNERS_KEY, - ORGS_CONFIG_ORG_KEY, - ORGS_KEY, - REPORTING_FILENAME_PREFIX, - SETUP_LMS_OR_EXIT, - generate_report, - _generate_report_files_or_exit, # pylint: disable=protected-access - _get_orgs_and_learners_or_exit, # pylint: disable=protected-access -) - -from tubular.tests.retirement_helpers import fake_config_file, fake_google_secrets_file, flatten_partner_list, FAKE_ORGS, TEST_PLATFORM_NAME - -TEST_CONFIG_YML_NAME = 'test_config.yml' -TEST_GOOGLE_SECRETS_FILENAME = 'test_google_secrets.json' -DELETION_TIME = time.strftime("%Y-%m-%dT%H:%M:%S") -UNICODE_NAME_CONSTANT = '阿碧' -USER_ID = '12345' -TEST_ORGS_CONFIG = [ - { - ORGS_CONFIG_ORG_KEY: 'orgCustom', - ORGS_CONFIG_FIELD_HEADINGS_KEY: ['heading_1', 'heading_2', 'heading_3'] - }, - { - ORGS_CONFIG_ORG_KEY: 'otherCustomOrg', - ORGS_CONFIG_FIELD_HEADINGS_KEY: ['unique_id'] - } -] -DEFAULT_FIELD_VALUES = { - 'user_id': USER_ID, - LEARNER_ORIGINAL_USERNAME_KEY: 'username', - 'original_email': 'invalid', - 'original_name': UNICODE_NAME_CONSTANT, - 'deletion_completed': DELETION_TIME -} - - -def _call_script(expect_success=True, expected_num_rows=10, config_orgs=None, expected_fields=None): - """ - Call the retired learner script with the given username and a generic, temporary config file. 
- Returns the CliRunner.invoke results - """ - if expected_fields is None: - expected_fields = DEFAULT_FIELD_VALUES - if config_orgs is None: - config_orgs = FAKE_ORGS - - runner = CliRunner() - with runner.isolated_filesystem(): - with open(TEST_CONFIG_YML_NAME, 'w') as config_f: - fake_config_file(config_f, config_orgs) - with open(TEST_GOOGLE_SECRETS_FILENAME, 'w') as secrets_f: - fake_google_secrets_file(secrets_f) - - tmp_output_dir = 'test_output_dir' - os.mkdir(tmp_output_dir) - - result = runner.invoke( - generate_report, - args=[ - '--config_file', - TEST_CONFIG_YML_NAME, - '--google_secrets_file', - TEST_GOOGLE_SECRETS_FILENAME, - '--output_dir', - tmp_output_dir - ] - ) - - print(result) - print(result.output) - - if expect_success: - assert result.exit_code == 0 - - if config_orgs is None: - # These are the orgs - config_org_vals = flatten_partner_list(FAKE_ORGS.values()) - else: - config_org_vals = flatten_partner_list(config_orgs.values()) - - # Normalize the unicode as the script does - if PY2: - config_org_vals = [org.decode('utf-8') for org in config_org_vals] - - config_org_vals = [unicodedata.normalize('NFKC', org) for org in config_org_vals] - - for org in config_org_vals: - outfile = os.path.join(tmp_output_dir, '{}_{}_{}_{}.csv'.format( - REPORTING_FILENAME_PREFIX, TEST_PLATFORM_NAME, org, date.today().isoformat() - )) - - with open(outfile, 'r') as csvfile: - reader = csv.DictReader(csvfile) - rows = [] - for row in reader: - for field_key in expected_fields: - field_value = expected_fields[field_key] - assert field_value in row[field_key] - rows.append(row) - - # Confirm the number of rows - assert len(rows) == expected_num_rows - return result - - -def _fake_retirement_report_user(seed_val, user_orgs=None, user_orgs_config=None): - """ - Creates unique user to populate a fake report with. - - seed_val is a number or other unique value for this user, will be formatted into - user values to make sure they're distinct. - - user_orgs, if given, should be a list of orgs that will be associated with the user. - - user_orgs_config, if given, should be a list of dicts mapping orgs to their customized - field headings. These orgs will also be associated with the user. 
- """ - user_info = { - 'user_id': USER_ID, - LEARNER_ORIGINAL_USERNAME_KEY: 'username_{}'.format(seed_val), - 'original_email': 'user_{}@foo.invalid'.format(seed_val), - 'original_name': '{} {}'.format(UNICODE_NAME_CONSTANT, seed_val), - LEARNER_CREATED_KEY: DELETION_TIME, - } - - if user_orgs is not None: - user_info[ORGS_KEY] = user_orgs - - if user_orgs_config is not None: - user_info[ORGS_CONFIG_KEY] = user_orgs_config - - return user_info - - -def _fake_retirement_report(num_users=10, user_orgs=None, user_orgs_config=None): - """ - Fake the output of a retirement report with unique users - """ - return [_fake_retirement_report_user(i, user_orgs, user_orgs_config) for i in range(num_users)] - - -@patch('tubular.edx_api.LmsApi.retirement_partner_report') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -def test_report_generation_multiple_partners(*args, **kwargs): - mock_get_access_token = args[0] - mock_retirement_report = args[1] - - org_1_users = [_fake_retirement_report_user(i, user_orgs=['org1']) for i in range(1,3)] - org_2_users = [_fake_retirement_report_user(i, user_orgs=['org2']) for i in range(3,5)] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_retirement_report.return_value = org_1_users + org_2_users - - config = { - 'client_id': 'bogus id', - 'client_secret': 'supersecret', - 'base_urls': { - 'lms': 'https://stage-edx-edxapp.edx.invalid/', - }, - 'org_partner_mapping': { - 'org1': ['Org1X'], - 'org2': ['Org2X', 'Org2Xb'], - } - } - SETUP_LMS_OR_EXIT(config) - orgs, usernames = _get_orgs_and_learners_or_exit(config) - - assert usernames == [{'original_username': 'username_{}'.format(username)} for username in range(1,5)] - - def _get_learner_usernames(org_data): - return [learner['original_username'] for learner in org_data['learners']] - - assert _get_learner_usernames(orgs['Org1X']) == ['username_1', 'username_2'] - - # Org2X and Org2Xb should have the same learners in their report data - assert _get_learner_usernames(orgs['Org2X']) == _get_learner_usernames(orgs['Org2Xb']) == ['username_3', 'username_4'] - - # Org2X and Org2Xb report data should match - assert orgs['Org2X'] == orgs['Org2Xb'] - - -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.create_file_in_folder') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.google_api.DriveApi.list_permissions_for_files') -@patch('tubular.google_api.DriveApi.create_comments_for_files') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - retirement_partner_report=DEFAULT, - retirement_partner_cleanup=DEFAULT -) -def test_successful_report(*args, **kwargs): - mock_get_access_token = args[0] - mock_create_comments = args[1] - mock_list_permissions = args[2] - mock_walk_files = args[3] - mock_create_files = args[4] - mock_driveapi = args[5] - mock_retirement_report = kwargs['retirement_partner_report'] - mock_retirement_cleanup = kwargs['retirement_partner_cleanup'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_create_comments.return_value = None - fake_partners = list(itervalues(FAKE_ORGS)) - # Generate the list_permissions return value. - # The first few have POCs. - mock_list_permissions.return_value = { - 'folder' + partner: [ - {'emailAddress': 'some.contact@example.com'}, # The POC. - {'emailAddress': 'another.contact@edx.org'}, - ] - for partner in flatten_partner_list(fake_partners[:2]) - } - # The last one does not have any POCs. 
- mock_list_permissions.return_value.update({ - 'folder' + partner: [ - {'emailAddress': 'another.contact@edx.org'}, - ] - for partner in fake_partners[2] - }) - mock_walk_files.return_value = [{'name': partner, 'id': 'folder' + partner} for partner in flatten_partner_list(FAKE_ORGS.values())] - mock_create_files.side_effect = ['foo', 'bar', 'baz', 'qux'] - mock_driveapi.return_value = None - mock_retirement_report.return_value = _fake_retirement_report(user_orgs=list(FAKE_ORGS.keys())) - - result = _call_script() - - # Make sure we're getting the LMS token - mock_get_access_token.assert_called_once() - - # Make sure that we get the report - mock_retirement_report.assert_called_once() - - # Make sure we tried to upload the files - assert mock_create_files.call_count == 4 - - # Make sure we tried to add comments to the files - assert mock_create_comments.call_count == 1 - # First [0] returns all positional args, second [0] gets the first positional arg. - create_comments_file_ids, create_comments_messages = zip(*mock_create_comments.call_args[0][0]) - assert set(create_comments_file_ids).issubset(set(['foo', 'bar', 'baz', 'qux'])) - assert len(create_comments_file_ids) == 2 # only two comments created, the third didn't have a POC. - assert all('+some.contact@example.com' in msg for msg in create_comments_messages) - assert all('+another.contact@edx.org' not in msg for msg in create_comments_messages) - assert 'WARNING: could not find a POC' in result.output - - # Make sure we tried to remove the users from the queue - mock_retirement_cleanup.assert_called_with( - [{'original_username': user[LEARNER_ORIGINAL_USERNAME_KEY]} for user in mock_retirement_report.return_value] - ) - - assert 'All reports completed and uploaded to Google.' in result.output - - -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.create_file_in_folder') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.google_api.DriveApi.list_permissions_for_files') -@patch('tubular.google_api.DriveApi.create_comments_for_files') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - retirement_partner_report=DEFAULT, - retirement_partner_cleanup=DEFAULT -) -def test_successful_report_org_config(*args, **kwargs): - mock_get_access_token = args[0] - mock_create_comments = args[1] - mock_list_permissions = args[2] - mock_walk_files = args[3] - mock_create_files = args[4] - mock_driveapi = args[5] - mock_retirement_report = kwargs['retirement_partner_report'] - mock_retirement_cleanup = kwargs['retirement_partner_cleanup'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_create_comments.return_value = None - fake_custom_orgs = { - 'orgCustom': ['firstBlah'] - } - fake_partners = list(itervalues(fake_custom_orgs)) - mock_list_permissions.return_value = { - 'folder' + partner: [ - {'emailAddress': 'some.contact@example.com'}, # The POC. 
- {'emailAddress': 'another.contact@edx.org'}, - ] - for partner in flatten_partner_list(fake_partners[:2]) - } - mock_walk_files.return_value = [{'name': partner, 'id': 'folder' + partner} for partner in - flatten_partner_list(fake_custom_orgs.values())] - mock_create_files.side_effect = ['foo', 'bar', 'baz'] - mock_driveapi.return_value = None - expected_num_users = 1 - - orgs_config = [ - { - ORGS_CONFIG_ORG_KEY: 'orgCustom', - ORGS_CONFIG_FIELD_HEADINGS_KEY: ['heading_1', 'heading_2', 'heading_3'] - } - ] - - # Input from the LMS - report_data = [ - { - 'heading_1': 'h1val', - 'heading_2': 'h2val', - 'heading_3': 'h3val', - LEARNER_ORIGINAL_USERNAME_KEY: 'blah', - LEARNER_CREATED_KEY: DELETION_TIME, - ORGS_CONFIG_KEY: orgs_config - } - ] - - # Resulting csv file content - expected_fields = { - 'heading_1': 'h1val', - 'heading_2': 'h2val', - 'heading_3': 'h3val', - } - - mock_retirement_report.return_value = report_data - - result = _call_script(expected_num_rows=expected_num_users, config_orgs=fake_custom_orgs, - expected_fields=expected_fields) - - # Make sure we're getting the LMS token - mock_get_access_token.assert_called_once() - - # Make sure that we get the report - mock_retirement_report.assert_called_once() - - # Make sure we tried to remove the users from the queue - mock_retirement_cleanup.assert_called_with( - [{'original_username': user[LEARNER_ORIGINAL_USERNAME_KEY]} for user in mock_retirement_report.return_value] - ) - - assert 'All reports completed and uploaded to Google.' in result.output - - -def test_no_config(): - runner = CliRunner() - result = runner.invoke(generate_report) - print(result.output) - assert result.exit_code == ERR_NO_CONFIG - assert 'No config file' in result.output - - -def test_no_secrets(): - runner = CliRunner() - result = runner.invoke(generate_report, args=['--config_file', 'does_not_exist.yml']) - print(result.output) - assert result.exit_code == ERR_NO_SECRETS - assert 'No secrets file' in result.output - - -def test_no_output_dir(): - runner = CliRunner() - with runner.isolated_filesystem(): - with open(TEST_CONFIG_YML_NAME, 'w') as config_f: - config_f.write('irrelevant') - - with open(TEST_GOOGLE_SECRETS_FILENAME, 'w') as config_f: - config_f.write('irrelevant') - - result = runner.invoke( - generate_report, - args=[ - '--config_file', - TEST_CONFIG_YML_NAME, - '--google_secrets_file', - TEST_GOOGLE_SECRETS_FILENAME - ] - ) - print(result.output) - assert result.exit_code == ERR_NO_OUTPUT_DIR - assert 'No output_dir' in result.output - - -def test_bad_config(): - runner = CliRunner() - with runner.isolated_filesystem(): - with open(TEST_CONFIG_YML_NAME, 'w') as config_f: - config_f.write(']this is bad yaml') - - with open(TEST_GOOGLE_SECRETS_FILENAME, 'w') as config_f: - config_f.write('{this is bad json but we should not get to parsing it') - - tmp_output_dir = 'test_output_dir' - os.mkdir(tmp_output_dir) - - result = runner.invoke( - generate_report, - args=[ - '--config_file', - TEST_CONFIG_YML_NAME, - '--google_secrets_file', - TEST_GOOGLE_SECRETS_FILENAME, - '--output_dir', - tmp_output_dir - ] - ) - print(result.output) - assert result.exit_code == ERR_BAD_CONFIG - assert 'Failed to read' in result.output - - -def test_bad_secrets(): - runner = CliRunner() - with runner.isolated_filesystem(): - with open(TEST_CONFIG_YML_NAME, 'w') as config_f: - fake_config_file(config_f) - - with open(TEST_GOOGLE_SECRETS_FILENAME, 'w') as config_f: - config_f.write('{this is bad json') - - tmp_output_dir = 'test_output_dir' - 
os.mkdir(tmp_output_dir) - - result = runner.invoke( - generate_report, - args=[ - '--config_file', - TEST_CONFIG_YML_NAME, - '--google_secrets_file', - TEST_GOOGLE_SECRETS_FILENAME, - '--output_dir', - tmp_output_dir - ] - ) - print(result.output) - assert result.exit_code == ERR_BAD_SECRETS - assert 'Failed to read' in result.output - - -def test_bad_output_dir(): - runner = CliRunner() - with runner.isolated_filesystem(): - with open(TEST_CONFIG_YML_NAME, 'w') as config_f: - fake_config_file(config_f) - - with open(TEST_GOOGLE_SECRETS_FILENAME, 'w') as config_f: - fake_google_secrets_file(config_f) - - result = runner.invoke( - generate_report, - args=[ - '--config_file', - TEST_CONFIG_YML_NAME, - '--google_secrets_file', - TEST_GOOGLE_SECRETS_FILENAME, - '--output_dir', - 'does_not_exist/at_all' - ] - ) - print(result.output) - assert result.exit_code == ERR_NO_OUTPUT_DIR - assert 'or path does not exist' in result.output - - -@patch('tubular.edx_api.BaseApiClient.get_access_token') -def test_setup_failed(*args): - mock_get_access_token = args[0] - mock_get_access_token.side_effect = Exception('boom') - - result = _call_script(expect_success=False) - mock_get_access_token.assert_called_once() - assert result.exit_code == ERR_SETUP_FAILED - - -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - retirement_partner_report=DEFAULT) -def test_fetching_learners_failed(*args, **kwargs): - mock_get_access_token = args[0] - mock_walk_files = args[1] - mock_drive_init = args[2] - mock_retirement_report = kwargs['retirement_partner_report'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_walk_files.return_value = [{'name': 'dummy_file_name', 'id': 'dummy_file_id'}] - mock_drive_init.return_value = None - mock_retirement_report.side_effect = Exception('failed to get learners') - - result = _call_script(expect_success=False) - - assert result.exit_code == ERR_FETCHING_LEARNERS - assert 'failed to get learners' in result.output - - -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -def test_listing_folders_failed(*args): - mock_get_access_token = args[0] - mock_walk_files = args[1] - mock_drive_init = args[2] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_walk_files.side_effect = [[], Exception()] - mock_drive_init.return_value = None - - # call it once; this time walk_files will return an empty list. - result = _call_script(expect_success=False) - - assert result.exit_code == ERR_DRIVE_LISTING - assert 'Finding partner directories on Drive failed' in result.output - - # call it a second time; this time walk_files will throw an exception. 
- result = _call_script(expect_success=False) - - assert result.exit_code == ERR_DRIVE_LISTING - assert 'Finding partner directories on Drive failed' in result.output - - -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - retirement_partner_report=DEFAULT) -def test_unknown_org(*args, **kwargs): - mock_get_access_token = args[0] - mock_drive_init = args[2] - mock_retirement_report = kwargs['retirement_partner_report'] - - mock_drive_init.return_value = None - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - - orgs = ['orgA', 'orgB'] - - mock_retirement_report.return_value = [_fake_retirement_report_user(i, orgs, TEST_ORGS_CONFIG) for i in range(10)] - - result = _call_script(expect_success=False) - - assert result.exit_code == ERR_UNKNOWN_ORG - assert 'orgA' in result.output - assert 'orgB' in result.output - assert 'orgCustom' in result.output - assert 'otherCustomOrg' in result.output - - -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - retirement_partner_report=DEFAULT) -def test_unknown_org_custom(*args, **kwargs): - mock_get_access_token = args[0] - mock_drive_init = args[2] - mock_retirement_report = kwargs['retirement_partner_report'] - - mock_drive_init.return_value = None - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - - custom_orgs_config = [ - { - ORGS_CONFIG_ORG_KEY: 'singleCustomOrg', - ORGS_CONFIG_FIELD_HEADINGS_KEY: ['first_heading', 'second_heading'] - } - ] - - mock_retirement_report.return_value = [_fake_retirement_report_user(i, None, custom_orgs_config) for i in range(2)] - - result = _call_script(expect_success=False) - - assert result.exit_code == ERR_UNKNOWN_ORG - assert 'organizations {\'singleCustomOrg\'} do not exist' in result.output - - -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch('unicodecsv.DictWriter') -@patch('tubular.edx_api.LmsApi.retirement_partner_report') -def test_reporting_error(*args): - mock_retirement_report = args[0] - mock_dictwriter = args[1] - mock_get_access_token = args[2] - mock_drive_init = args[4] - - error_msg = 'Fake unable to write csv' - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_dictwriter.side_effect = Exception(error_msg) - mock_drive_init.return_value = None - mock_retirement_report.return_value = _fake_retirement_report(user_orgs=list(FAKE_ORGS.keys())) - - result = _call_script(expect_success=False) - - assert result.exit_code == ERR_REPORTING - assert error_msg in result.output - -@patch('tubular.google_api.DriveApi.list_permissions_for_files') -@patch('tubular.google_api.DriveApi.create_comments_for_files') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.create_file_in_folder') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - retirement_partner_report=DEFAULT, - retirement_partner_cleanup=DEFAULT -) -def test_cleanup_error(*args, **kwargs): - mock_get_access_token = args[0] - mock_create_files = args[1] - mock_driveapi = args[2] - mock_walk_files = args[3] - mock_create_comments = args[4] - 
mock_list_permissions = args[5] - mock_retirement_report = kwargs['retirement_partner_report'] - mock_retirement_cleanup = kwargs['retirement_partner_cleanup'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_create_files.return_value = True - mock_driveapi.return_value = None - mock_walk_files.return_value = [{'name': partner, 'id': 'folder' + partner} for partner in flatten_partner_list(FAKE_ORGS.values())] - fake_partners = list(itervalues(FAKE_ORGS)) - # Generate the list_permissions return value. - mock_list_permissions.return_value = { - 'folder' + partner: [ - {'emailAddress': 'some.contact@example.com'}, # The POC. - {'emailAddress': 'another.contact@edx.org'}, - {'emailAddress': 'third@edx.org'} - ] - for partner in flatten_partner_list(fake_partners) - } - mock_create_comments.return_value = None - - - mock_retirement_report.return_value = _fake_retirement_report(user_orgs=list(FAKE_ORGS.keys())) - mock_retirement_cleanup.side_effect = Exception('Mock cleanup exception') - - result = _call_script(expect_success=False) - - mock_retirement_cleanup.assert_called_with( - [{'original_username': user[LEARNER_ORIGINAL_USERNAME_KEY]} for user in mock_retirement_report.return_value] - ) - - assert result.exit_code == ERR_CLEANUP - assert 'Users may be stuck in the processing state!' in result.output - - -@patch('tubular.google_api.DriveApi.__init__') -@patch('tubular.google_api.DriveApi.create_file_in_folder') -@patch('tubular.google_api.DriveApi.walk_files') -@patch('tubular.google_api.DriveApi.list_permissions_for_files') -@patch('tubular.google_api.DriveApi.create_comments_for_files') -@patch('tubular.edx_api.BaseApiClient.get_access_token') -@patch.multiple( - 'tubular.edx_api.LmsApi', - retirement_partner_report=DEFAULT, - retirement_partner_cleanup=DEFAULT -) -def test_google_unicode_folder_names(*args, **kwargs): - mock_get_access_token = args[0] - mock_create_comments = args[1] - mock_list_permissions = args[2] - mock_walk_files = args[3] - mock_create_files = args[4] - mock_driveapi = args[5] - mock_retirement_report = kwargs['retirement_partner_report'] - mock_retirement_cleanup = kwargs['retirement_partner_cleanup'] - - mock_get_access_token.return_value = ('THIS_IS_A_JWT', None) - mock_list_permissions.return_value = { - 'folder' + partner: [ - {'emailAddress': 'some.contact@example.com'}, - {'emailAddress': 'another.contact@edx.org'}, - ] - for partner in [ - unicodedata.normalize('NFKC', u'TéstX'), - unicodedata.normalize('NFKC', u'TéstX2'), - unicodedata.normalize('NFKC', u'TéstX3'), - ] - } - mock_walk_files.return_value = [ - {'name': partner, 'id': 'folder' + partner} - for partner in [ - unicodedata.normalize('NFKC', u'TéstX'), - unicodedata.normalize('NFKC', u'TéstX2'), - unicodedata.normalize('NFKC', u'TéstX3'), - ] - ] - mock_create_files.side_effect = ['foo', 'bar', 'baz'] - mock_driveapi.return_value = None - mock_retirement_report.return_value = _fake_retirement_report(user_orgs=list(FAKE_ORGS.keys())) - - config_orgs = { - 'org1': [unicodedata.normalize('NFKC', u'TéstX')], - 'org2': [unicodedata.normalize('NFD', u'TéstX2')], - 'org3': [unicodedata.normalize('NFKD', u'TéstX3')], - } - - result = _call_script(config_orgs=config_orgs) - - # Make sure we're getting the LMS token - mock_get_access_token.assert_called_once() - - # Make sure that we get the report - mock_retirement_report.assert_called_once() - - # Make sure we tried to upload the files - assert mock_create_files.call_count == 3 - - # Make sure we tried to add comments to the 
files - assert mock_create_comments.call_count == 1 - # First [0] returns all positional args, second [0] gets the first positional arg. - create_comments_file_ids, create_comments_messages = zip(*mock_create_comments.call_args[0][0]) - assert set(create_comments_file_ids) == set(['foo', 'bar', 'baz']) - assert all('+some.contact@example.com' in msg for msg in create_comments_messages) - assert all('+another.contact@edx.org' not in msg for msg in create_comments_messages) - - # Make sure we tried to remove the users from the queue - mock_retirement_cleanup.assert_called_with( - [{'original_username': user[LEARNER_ORIGINAL_USERNAME_KEY]} for user in mock_retirement_report.return_value] - ) - - assert 'All reports completed and uploaded to Google.' in result.output - - -def test_file_content_custom_headings(): - runner = CliRunner() - with runner.isolated_filesystem(): - config = {'partner_report_platform_name': 'fake_platform_name'} - tmp_output_dir = 'test_output_dir' - os.mkdir(tmp_output_dir) - - # Custom headings and values - ch1 = 'special_id' - ch1v = '134456765432' - ch2 = 'alternate_heading_for_email' - ch2v = 'zxcvbvcxz@blah.com' - custom_field_headings = [ch1, ch2] - - org_name = 'my_delightful_org' - username = 'unique_user' - learner_data = [ - { - ch1: ch1v, - ch2: ch2v, - LEARNER_ORIGINAL_USERNAME_KEY: username, - LEARNER_CREATED_KEY: DELETION_TIME, - } - ] - report_data = { - org_name: { - ORGS_CONFIG_FIELD_HEADINGS_KEY: custom_field_headings, - ORGS_CONFIG_LEARNERS_KEY: learner_data - } - } - - partner_filenames = _generate_report_files_or_exit(config, report_data, tmp_output_dir) - - assert len(partner_filenames) == 1 - filename = partner_filenames[org_name] - with open(filename) as f: - file_content = f.read() - - # Custom field headings - for ch in custom_field_headings: - # Verify custom field headings are present - assert ch in file_content - # Verify custom field values are present - assert ch1v in file_content - assert ch2v in file_content - - # Default field headings - for h in DEFAULT_FIELD_HEADINGS: - # Verify default field headings are not present - assert h not in file_content - # Verify default field values are not present - assert username not in file_content - assert DELETION_TIME not in file_content diff --git a/tubular/tests/test_salesforce.py b/tubular/tests/test_salesforce.py deleted file mode 100644 index 88d1a9a4..00000000 --- a/tubular/tests/test_salesforce.py +++ /dev/null @@ -1,147 +0,0 @@ -""" -Tests for the Salesforce API functionality -""" -import logging -from contextlib import contextmanager -import mock -import pytest -from simple_salesforce import SalesforceError - -from tubular import salesforce_api - - -@pytest.fixture -def test_learner(): - return {'original_email': 'foo@bar.com'} - - -def make_api(): - """ - Helper function to create salesforce api object - """ - return salesforce_api.SalesforceApi("user", "pass", "key", "domain", "user") - - -@contextmanager -def mock_get_user(): - """ - Context manager method to mock getting the assignee user id when the api object is created - """ - with mock.patch('tubular.salesforce_api.SalesforceApi.get_user_id') as getuser: - getuser.return_value = "userid" - yield - - -def test_no_assignee_email(): - with mock.patch('tubular.salesforce_api.SalesforceApi.get_user_id') as getuser: - getuser.return_value = None - with mock.patch('tubular.salesforce_api.Salesforce'): - with pytest.raises(Exception) as exc: - make_api() - print(str(exc)) - assert 'Could not find Salesforce user with username user' in 
str(exc) - - -def test_retire_no_email(): - with mock_get_user(): - with mock.patch('tubular.salesforce_api.Salesforce'): - with pytest.raises(TypeError) as exc: - make_api().retire_learner({}) - assert 'Expected an email address for user to delete, but received None.' in str(exc) - - -def test_retire_get_id_error(test_learner): # pylint: disable=redefined-outer-name - with mock_get_user(): - with mock.patch('tubular.salesforce_api.Salesforce'): - api = make_api() - api._sf.query.side_effect = SalesforceError("", "", "", "") # pylint: disable=protected-access - with pytest.raises(SalesforceError): - api.retire_learner(test_learner) - -# pylint: disable=protected-access -def test_escape_email(): - with mock.patch('tubular.salesforce_api.Salesforce'): - api = make_api() - mock_response = {'totalSize': 0, 'records': []} - api._sf.query.return_value = mock_response - api.get_lead_ids_by_email("Robert'); DROP TABLE students;--") - api._sf.query.assert_called_with( - "SELECT Id FROM Lead WHERE Email = 'Robert\\'); DROP TABLE students;--'" - ) - -# pylint: disable=protected-access -def test_escape_username(): - with mock.patch('tubular.salesforce_api.Salesforce'): - api = make_api() - mock_response = {'totalSize': 0, 'records': []} - api._sf.query.return_value = mock_response - api.get_user_id("Robert'); DROP TABLE students;--") - api._sf.query.assert_called_with( - "SELECT Id FROM User WHERE Username = 'Robert\\'); DROP TABLE students;--'" - ) - -def test_retire_learner_not_found(test_learner, caplog): # pylint: disable=redefined-outer-name - caplog.set_level(logging.INFO) - with mock_get_user(): - with mock.patch('tubular.salesforce_api.Salesforce'): - api = make_api() - mock_response = {'totalSize': 0, 'records': []} - api._sf.query.return_value = mock_response # pylint: disable=protected-access - api.retire_learner(test_learner) - assert not api._sf.Task.create.called # pylint: disable=protected-access - assert 'No action taken because no lead was found in Salesforce.' in caplog.text - - -def test_retire_task_error(test_learner, caplog): # pylint: disable=redefined-outer-name - with mock_get_user(): - with mock.patch('tubular.salesforce_api.Salesforce'): - api = make_api() - mock_query_response = {'totalSize': 1, 'records': [{'Id': 1}]} - api._sf.query.return_value = mock_query_response # pylint: disable=protected-access - mock_task_response = {'success': False, 'errors': ["This is an error!"]} - api._sf.Task.create.return_value = mock_task_response # pylint: disable=protected-access - with pytest.raises(Exception) as exc: - api.retire_learner(test_learner) - assert "Errors while creating task:" in caplog.text - assert "This is an error!" 
in caplog.text - assert "Unable to create retirement task for email foo@bar.com" in str(exc) - - -def test_retire_task_exception(test_learner): # pylint: disable=redefined-outer-name - with mock_get_user(): - with mock.patch('tubular.salesforce_api.Salesforce'): - api = make_api() - mock_query_response = {'totalSize': 1, 'records': [{'Id': 1}]} - api._sf.query.return_value = mock_query_response # pylint: disable=protected-access - api._sf.Task.create.side_effect = SalesforceError("", "", "", "") # pylint: disable=protected-access - with pytest.raises(SalesforceError): - api.retire_learner(test_learner) - - -def test_retire_success(test_learner, caplog): # pylint: disable=redefined-outer-name - caplog.set_level(logging.INFO) - with mock_get_user(): - with mock.patch('tubular.salesforce_api.Salesforce'): - api = make_api() - mock_query_response = {'totalSize': 1, 'records': [{'Id': 1}]} - api._sf.query.return_value = mock_query_response # pylint: disable=protected-access - mock_task_response = {'success': True, 'id': 'task-id'} - api._sf.Task.create.return_value = mock_task_response # pylint: disable=protected-access - api.retire_learner(test_learner) - assert "Successfully salesforce task created task task-id" in caplog.text - - -def test_retire_multiple_learners(test_learner, caplog): # pylint: disable=redefined-outer-name - caplog.set_level(logging.INFO) - with mock_get_user(): - with mock.patch('tubular.salesforce_api.Salesforce'): - api = make_api() - mock_response = {'totalSize': 2, 'records': [{'Id': 1}, {'Id': 2}]} - api._sf.query.return_value = mock_response # pylint: disable=protected-access - mock_task_response = {'success': True, 'id': 'task-id'} - api._sf.Task.create.return_value = mock_task_response # pylint: disable=protected-access - api.retire_learner(test_learner) - assert "Multiple Ids returned for Lead with email foo@bar.com" in caplog.text - assert "Successfully salesforce task created task task-id" in caplog.text - note = "Notice: Multiple leads were identified with the same email. 
Please retire all following leads:" - assert note in api._sf.Task.create.call_args[0][0]['Description'] # pylint: disable=protected-access diff --git a/tubular/tests/test_segment_api.py b/tubular/tests/test_segment_api.py deleted file mode 100644 index e311dcae..00000000 --- a/tubular/tests/test_segment_api.py +++ /dev/null @@ -1,169 +0,0 @@ -""" -Tests for the Segment API functionality -""" -import json -import mock -import pytest - -import requests -from six import text_type - -from tubular.segment_api import SegmentApi, BULK_REGULATE_URL -from tubular.tests.retirement_helpers import get_fake_user_retirement - -FAKE_AUTH_TOKEN = 'FakeToken' -TEST_SEGMENT_CONFIG = { - 'projects_to_retire': ['project_1', 'project_2'], - 'learner': [get_fake_user_retirement(), ], - 'fake_base_url': 'https://segment.invalid/', - 'fake_auth_token': FAKE_AUTH_TOKEN, - 'fake_workspace': 'FakeEdx', - 'headers': {"Authorization": "Bearer {}".format(FAKE_AUTH_TOKEN), "Content-Type": "application/json"} -} - - -class FakeResponse: - """ - Fakes out requests.post response - """ - def json(self): - """ - Returns fake Segment retirement response data in the correct format - """ - return {'regulate_id': 1} - - def raise_for_status(self): - pass - - -class FakeErrorResponse: - """ - Fakes an error response - """ - status_code = 500 - text = "{'error': 'Test error message'}" - - def json(self): - """ - Returns fake Segment retirement response error in the correct format - """ - return json.loads(self.text) - - def raise_for_status(self): - raise requests.exceptions.HTTPError("", response=self) - - -@pytest.fixture -def setup_regulation_api(): - """ - Fixture to setup common bulk delete items. - """ - with mock.patch('requests.post') as mock_post: - segment = SegmentApi( - *[TEST_SEGMENT_CONFIG[key] for key in [ - 'fake_base_url', 'fake_auth_token', 'fake_workspace' - ]] - ) - - yield mock_post, segment - - -def test_bulk_delete_success(setup_regulation_api): # pylint: disable=redefined-outer-name - """ - Test simple success case - """ - mock_post, segment = setup_regulation_api - mock_post.return_value = FakeResponse() - - learner = TEST_SEGMENT_CONFIG['learner'] - segment.delete_and_suppress_learners(learner, 1000) - - assert mock_post.call_count == 1 - - expected_learner = get_fake_user_retirement() - learners_vals = [ - text_type(expected_learner['user']['id']), - expected_learner['original_username'], - expected_learner['ecommerce_segment_id'], - ] - - fake_json = { - "regulation_type": "Suppress_With_Delete", - "attributes": { - "name": "userId", - "values": learners_vals - } - } - - url = TEST_SEGMENT_CONFIG['fake_base_url'] + BULK_REGULATE_URL.format(TEST_SEGMENT_CONFIG['fake_workspace']) - mock_post.assert_any_call( - url, json=fake_json, headers=TEST_SEGMENT_CONFIG['headers'] - ) - - -def test_bulk_delete_error(setup_regulation_api, caplog): # pylint: disable=redefined-outer-name - """ - Test simple error case - """ - mock_post, segment = setup_regulation_api - mock_post.return_value = FakeErrorResponse() - - learner = TEST_SEGMENT_CONFIG['learner'] - with pytest.raises(Exception): - segment.delete_and_suppress_learners(learner, 1000) - - assert mock_post.call_count == 4 - assert "Error was encountered for params:" in caplog.text - assert "9009" in caplog.text - assert "foo_username" in caplog.text - assert "ecommerce-90" in caplog.text - assert "Suppress_With_Delete" in caplog.text - assert "Test error message" in caplog.text - - -def test_bulk_unsuppress_success(setup_regulation_api): # pylint: 
disable=redefined-outer-name - """ - Test simple success case - """ - mock_post, segment = setup_regulation_api - mock_post.return_value = FakeResponse() - - learner = TEST_SEGMENT_CONFIG['learner'] - segment.unsuppress_learners_by_key('original_username', learner, 100) - - assert mock_post.call_count == 1 - - expected_learner = get_fake_user_retirement() - - fake_json = { - "regulation_type": "Unsuppress", - "attributes": { - "name": "userId", - "values": [expected_learner['original_username'], ] - } - } - - url = TEST_SEGMENT_CONFIG['fake_base_url'] + BULK_REGULATE_URL.format(TEST_SEGMENT_CONFIG['fake_workspace']) - mock_post.assert_any_call( - url, json=fake_json, headers=TEST_SEGMENT_CONFIG['headers'] - ) - - -def test_bulk_unsuppress_error(setup_regulation_api, caplog): # pylint: disable=redefined-outer-name - """ - Test simple error case - """ - mock_post, segment = setup_regulation_api - mock_post.return_value = FakeErrorResponse() - - learner = TEST_SEGMENT_CONFIG['learner'] - with pytest.raises(Exception): - segment.unsuppress_learners_by_key('original_username', learner, 100) - - assert mock_post.call_count == 4 - assert "Error was encountered for params:" in caplog.text - assert "9009" not in caplog.text - assert "foo_username" in caplog.text - assert "ecommerce-90" not in caplog.text - assert "Unsuppress" in caplog.text - assert "Test error message" in caplog.text
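The removed test_segment_api.py fixtures above pin down the request shape that SegmentApi.delete_and_suppress_learners was expected to send to Segment's bulk regulation endpoint: a single POST with a "Suppress_With_Delete" regulation, a "userId" attribute listing the learner identifiers, and a bearer-token header. The following is a minimal sketch of that call, reconstructed only from those test fixtures; the endpoint path template, workspace, token, and base URL here are placeholder values taken from the tests (or marked hypothetical), not the real client implementation.

```python
import requests

# Placeholder values mirroring the deleted test fixtures; real deployments
# supplied their own workspace, token, and learner records via config.
BASE_URL = "https://segment.invalid/"
BULK_REGULATE_URL = "v1beta/workspaces/{}/regulations"  # hypothetical path template
WORKSPACE = "FakeEdx"
AUTH_TOKEN = "FakeToken"


def bulk_suppress_with_delete(learner_ids):
    """POST one Suppress_With_Delete regulation covering the given identifiers."""
    payload = {
        "regulation_type": "Suppress_With_Delete",
        "attributes": {"name": "userId", "values": list(learner_ids)},
    }
    headers = {
        "Authorization": "Bearer {}".format(AUTH_TOKEN),
        "Content-Type": "application/json",
    }
    url = BASE_URL + BULK_REGULATE_URL.format(WORKSPACE)
    resp = requests.post(url, json=payload, headers=headers)
    resp.raise_for_status()  # the deleted error-path tests expect HTTP failures to raise
    return resp.json()       # the fake success response returns {'regulate_id': 1}
```

In the removed success test, the values list was built per learner from the user id, original_username, and ecommerce_segment_id; the sketch above simply accepts whatever identifiers the caller passes.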