diff --git a/cdci_data_analysis/analysis/drupal_helper.py b/cdci_data_analysis/analysis/drupal_helper.py index ded4b7875..f0806ed45 100644 --- a/cdci_data_analysis/analysis/drupal_helper.py +++ b/cdci_data_analysis/analysis/drupal_helper.py @@ -21,6 +21,8 @@ from enum import Enum, auto from astropy.coordinates import SkyCoord, Angle from astropy import units as u +from astroquery.simbad import Simbad +import xml.etree.ElementTree as ET from cdci_data_analysis.analysis import tokenHelper from ..analysis.exceptions import RequestNotUnderstood, InternalError, RequestNotAuthorized @@ -551,11 +553,14 @@ def post_content_to_gallery(decoded_token, if update_astro_entity: auto_update = kwargs.pop('auto_update', 'False') == 'True' if auto_update is True: - name_resolver_url = disp_conf.name_resolver_url + local_name_resolver_url = disp_conf.local_name_resolver_url + external_name_resolver_url = disp_conf.external_name_resolver_url entities_portal_url = disp_conf.entities_portal_url - resolved_obj = resolve_name(name_resolver_url=name_resolver_url, + resolved_obj = resolve_name(local_name_resolver_url=local_name_resolver_url, + external_name_resolver_url=external_name_resolver_url, entities_portal_url=entities_portal_url, - name=src_name) + name=src_name, + sentry_dsn=sentry_dsn) if resolved_obj is not None: msg = '' if 'message' in resolved_obj: @@ -1488,39 +1493,128 @@ def check_matching_coords(source_1_name, source_1_coord_ra, source_1_coord_dec, return False -def resolve_name(name_resolver_url: str, entities_portal_url: str = None, name: str = None): +def resolve_name(local_name_resolver_url: str, external_name_resolver_url: str, entities_portal_url: str = None, name: str = None, sentry_dsn=None): resolved_obj = {} if name is not None: quoted_name = urllib.parse.quote(name.strip()) - res = requests.get(name_resolver_url.format(quoted_name)) - if res.status_code == 200: - returned_resolved_obj = res.json() - if 'success' in returned_resolved_obj: + 
local_name_resolver_url_formatted = local_name_resolver_url.format(quoted_name) + try: + res = requests.get(local_name_resolver_url_formatted) + if res.status_code == 200: + returned_resolved_obj = res.json() + if 'success' in returned_resolved_obj: + resolved_obj['name'] = name.replace('_', ' ') + if returned_resolved_obj['success']: + logger.info(f"object {name} successfully resolved") + if 'ra' in returned_resolved_obj: + resolved_obj['RA'] = float(returned_resolved_obj['ra']) + if 'dec' in returned_resolved_obj: + resolved_obj['DEC'] = float(returned_resolved_obj['dec']) + if 'object_ids' in returned_resolved_obj: + resolved_obj['object_ids'] = returned_resolved_obj['object_ids'] + if 'object_type' in returned_resolved_obj: + resolved_obj['object_type'] = returned_resolved_obj['object_type'] + resolved_obj['entity_portal_link'] = entities_portal_url.format(quoted_name) + resolved_obj['message'] = f'{name} successfully resolved' + elif not returned_resolved_obj['success']: + logger.info(f"resolution of the object {name} unsuccessful") + resolved_obj['message'] = f'{name} could not be resolved' + else: + logger.warning("There seems to be some problem in completing the request for the resolution of the object" + f" \"{name}\" using the local resolver.\n" + f"The request lead to the error {res.text}, " + "this might be due to an error in the url or the service " + "requested is currently not available. The external resolver will be used.") + if sentry_dsn is not None: + sentry.capture_message(f'Failed to resolve object "{name}" using the local resolver. ' + f'URL: {local_name_resolver_url_formatted} ' + f'Status Code: {res.status_code} ' + f'Response: {res.text}') + except (ConnectionError, + requests.exceptions.ConnectionError, + requests.exceptions.Timeout) as e: + logger.warning(f'An exception occurred while trying to resolve the object "{name}" using the local resolver. ' + f'using the url: {local_name_resolver_url_formatted}. 
Exception details: {str(e)}') + if sentry_dsn is not None: + sentry.capture_message(f'An exception occurred while trying to resolve the object "{name}" using the local resolver. ' + f'URL: {local_name_resolver_url_formatted} ' + f"Exception details: {str(e)}") + external_name_resolver_url_formatted = external_name_resolver_url.format(quoted_name) + try: + res = requests.get(external_name_resolver_url_formatted) + if res.status_code == 200: + root = ET.fromstring(res.text) resolved_obj['name'] = name.replace('_', ' ') - if returned_resolved_obj['success']: - logger.info(f"object {name} successfully resolved") - if 'ra' in returned_resolved_obj: - resolved_obj['RA'] = float(returned_resolved_obj['ra']) - if 'dec' in returned_resolved_obj: - resolved_obj['DEC'] = float(returned_resolved_obj['dec']) - if 'object_ids' in returned_resolved_obj: - resolved_obj['object_ids'] = returned_resolved_obj['object_ids'] - if 'object_type' in returned_resolved_obj: - resolved_obj['object_type'] = returned_resolved_obj['object_type'] - resolved_obj['entity_portal_link'] = entities_portal_url.format(quoted_name) - resolved_obj['message'] = f'{name} successfully resolved' - elif not returned_resolved_obj['success']: - logger.info(f"resolution of the object {name} unsuccessful") + resolver_tag = root.find('.//Resolver') + if resolver_tag is not None: + ra_tag = resolver_tag.find('.//jradeg') + dec_tag = resolver_tag.find('.//jdedeg') + if ra_tag is None or dec_tag is None: + info_tag = root.find('.//INFO') + resolved_obj['message'] = f'{name} could not be resolved' + if info_tag is not None: + message_info = info_tag.text + resolved_obj['message'] += f': {message_info}' + else: + resolved_obj['RA'] = float(ra_tag.text) + resolved_obj['DEC'] = float(dec_tag.text) + resolved_obj['entity_portal_link'] = entities_portal_url.format(quoted_name) + resolved_obj['message'] = f'{name} successfully resolved' + + try: + Simbad.add_votable_fields("otype") + result_table = 
Simbad.query_object(quoted_name) + object_type = str(result_table[0]['OTYPE']).strip() + resolved_obj['object_type'] = object_type + except Exception as e: + logger.warning(f"An exception occurred while using Simbad to query the object \"{name}\" " + f"while using the external resolver:\n{str(e)}") + resolved_obj['object_type'] = None + try: + object_ids_table = Simbad.query_objectids(name) + source_ids_list = object_ids_table['ID'].tolist() + resolved_obj['object_ids'] = source_ids_list + except Exception as e: + logger.warning(f"An exception occurred while using Simbad to query the object ids for the object \"{name}\" " + f"while using the external resolver:\n{str(e)}") + resolved_obj['object_ids'] = None + else: + warning_msg = ("There seems to be some problem in completing the request for the resolution of the object" + f" \"{name}\" using the external resolver.") resolved_obj['message'] = f'{name} could not be resolved' - else: - logger.warning(f"there seems to be some problem in completing the request for the resolution of the object: {name}\n" - f"the request lead to the error {res.text}, " - "this might be due to an error in the url or the service " - "requested is currently not available, " - "please check your request and try to issue it again") - raise InternalError('issue when performing a request to the local resolver', - status_code=500, - payload={'drupal_helper_error_message': res.text}) + info_tag = root.find('.//INFO') + if info_tag is not None: + warning_msg += (f"The request lead to the error {info_tag.text}, " + "this might be due to an error in the name of the object that has been provided.") + resolved_obj['message'] += f': {info_tag.text}' + logger.warning(warning_msg) + if sentry_dsn is not None: + sentry.capture_message(f'Failed to resolve object "{name}" using the external resolver. 
' + f'URL: {external_name_resolver_url_formatted} ' + f'Status Code: {res.status_code} ' + f'Response: {res.text}' + f"Info returned from the resolver: {resolved_obj['message']}") + else: + logger.warning("There seems to be some problem in completing the request for the resolution of the object" + f" \"{name}\" using the external resolver.\n" + f"The request lead to the error {res.text}, " + "this might be due to an error in the url or the service " + "requested is currently not available. The object could not be resolved.") + if sentry_dsn is not None: + sentry.capture_message(f'Failed to resolve object "{name}" using the external resolver. ' + f'URL: {external_name_resolver_url_formatted} ' + f'Status Code: {res.status_code} ' + f'Response: {res.text}') + resolved_obj['message'] = f'{name} could not be resolved: {res.text}' + except (ConnectionError, + requests.exceptions.ConnectionError, + requests.exceptions.Timeout) as e: + logger.warning(f'An exception occurred while trying to resolve the object "{name}" using the external resolver. ' + f'using the url: {external_name_resolver_url_formatted}. Exception details: {str(e)}') + if sentry_dsn is not None: + sentry.capture_message(f'An exception occurred while trying to resolve the object "{name}" using the external resolver. 
' + f'URL: {external_name_resolver_url_formatted} ' + f"Exception details: {str(e)}") return resolved_obj diff --git a/cdci_data_analysis/config_dir/conf_env.yml.example b/cdci_data_analysis/config_dir/conf_env.yml.example index 2dd658681..f5aec142e 100644 --- a/cdci_data_analysis/config_dir/conf_env.yml.example +++ b/cdci_data_analysis/config_dir/conf_env.yml.example @@ -115,8 +115,10 @@ dispatcher: product_gallery_secret_key: PRODUCT_GALLERY_SECRET_KEY # timezone used within the drupal configuration, these two values have to be always aligned product_gallery_timezone: PRODUCT_GALLERY_SECRET_KEY - # url of the name resolver - name_resolver_url: NAME_RESOLVER_URL + # url of the local name resolver + local_name_resolver_url: NAME_RESOLVER_URL + # url of the external name resolver + external_name_resolver_url: NAME_RESOLVER_URL # url of the online catalog for astrophysical entities entities_portal_url: ENTITIES_PORTAL_URL # url for the conversion of a given time, in UTC format, to the correspondent REVNUM diff --git a/cdci_data_analysis/configurer.py b/cdci_data_analysis/configurer.py index d35e14896..e6591a965 100644 --- a/cdci_data_analysis/configurer.py +++ b/cdci_data_analysis/configurer.py @@ -260,7 +260,10 @@ def __init__(self, cfg_dict, origin=None): disp_dict.get('product_gallery_options', {}).get('product_gallery_secret_key', None), disp_dict.get('product_gallery_options', {}).get('product_gallery_timezone', "Europe/Zurich"), - disp_dict.get('product_gallery_options', {}).get('name_resolver_url', 'https://resolver-prod.obsuks1.unige.ch/api/v1.1/byname/{}'), + disp_dict.get('product_gallery_options', {}).get('local_name_resolver_url', + 'https://resolver-prod.obsuks1.unige.ch/api/v1.1/byname/{}'), + disp_dict.get('product_gallery_options', {}).get('external_name_resolver_url', + 'http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/-oxp/NSV?{}'), disp_dict.get('product_gallery_options', {}).get('entities_portal_url', 'http://cdsportal.u-strasbg.fr/?target={}'), 
disp_dict.get('product_gallery_options', {}).get('converttime_revnum_service_url', 'https://www.astro.unige.ch/mmoda/dispatch-data/gw/timesystem/api/v1.0/converttime/UTC/{}/REVNUM'), disp_dict.get('renku_options', {}).get('renku_gitlab_repository_url', None), @@ -338,7 +341,8 @@ def set_conf_dispatcher(self, product_gallery_url, product_gallery_secret_key, product_gallery_timezone, - name_resolver_url, + local_name_resolver_url, + external_name_resolver_url, entities_portal_url, converttime_revnum_service_url, renku_gitlab_repository_url, @@ -389,7 +393,8 @@ def set_conf_dispatcher(self, self.product_gallery_url = product_gallery_url self.product_gallery_secret_key = product_gallery_secret_key self.product_gallery_timezone = product_gallery_timezone - self.name_resolver_url = name_resolver_url + self.local_name_resolver_url = local_name_resolver_url + self.external_name_resolver_url = external_name_resolver_url self.entities_portal_url = entities_portal_url self.converttime_revnum_service_url = converttime_revnum_service_url self.renku_gitlab_repository_url = renku_gitlab_repository_url diff --git a/cdci_data_analysis/flask_app/app.py b/cdci_data_analysis/flask_app/app.py index 8627ac8b0..968b600be 100644 --- a/cdci_data_analysis/flask_app/app.py +++ b/cdci_data_analysis/flask_app/app.py @@ -650,12 +650,17 @@ def resolve_name(): name = par_dic.get('name', None) - name_resolver_url = app_config.name_resolver_url + local_name_resolver_url = app_config.local_name_resolver_url + external_name_resolver_url = app_config.external_name_resolver_url entities_portal_url = app_config.entities_portal_url - resolve_object = drupal_helper.resolve_name(name_resolver_url=name_resolver_url, + sentry_dsn = sentry.sentry_url + + resolve_object = drupal_helper.resolve_name(local_name_resolver_url=local_name_resolver_url, + external_name_resolver_url=external_name_resolver_url, entities_portal_url=entities_portal_url, - name=name) + name=name, + sentry_dsn=sentry_dsn) return 
resolve_object diff --git a/cdci_data_analysis/flask_app/dispatcher_query.py b/cdci_data_analysis/flask_app/dispatcher_query.py index e086245f6..4dfad4e0e 100644 --- a/cdci_data_analysis/flask_app/dispatcher_query.py +++ b/cdci_data_analysis/flask_app/dispatcher_query.py @@ -22,6 +22,7 @@ import glob import string import random +import fcntl from flask import jsonify, send_from_directory, make_response from flask import request, g @@ -886,9 +887,13 @@ def get_request_files_dir(self): return request_files_dir.path def set_scratch_dir(self, session_id, job_id=None, verbose=False): - if verbose == True: - print('SETSCRATCH ---->', session_id, - type(session_id), job_id, type(job_id)) + lock_file = f".lock_{self.job_id}" + scratch_dir_retry_attempts = 5 + scratch_dir_retry_delay = 0.2 + scratch_dir_created = True + + if verbose: + print('SETSCRATCH ---->', session_id, type(session_id), job_id, type(job_id)) wd = 'scratch' @@ -898,14 +903,28 @@ def set_scratch_dir(self, session_id, job_id=None, verbose=False): if job_id is not None: wd += '_jid_'+job_id - alias_workdir = self.get_existing_job_ID_path( - wd=FilePath(file_dir=wd).path) - if alias_workdir is not None: - wd = wd+'_aliased' - - wd = FilePath(file_dir=wd) - wd.mkdir() - self.scratch_dir = wd.path + for attempt in range(scratch_dir_retry_attempts): + try: + with open(lock_file, 'w') as lock: + fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB) + alias_workdir = self.get_existing_job_ID_path(wd=FilePath(file_dir=wd).path) + if alias_workdir is not None: + wd = wd + '_aliased' + + wd_path_obj = FilePath(file_dir=wd) + wd_path_obj.mkdir() + self.scratch_dir = wd_path_obj.path + scratch_dir_created = True + break + except (OSError, IOError) as io_e: + scratch_dir_created = False + self.logger.warning(f'Failed to acquire lock for the scratch directory creation, attempt number {attempt + 1} ({scratch_dir_retry_attempts - (attempt + 1)} left), sleeping {scratch_dir_retry_delay} seconds until retry.\nError: 
{str(io_e)}') + time.sleep(scratch_dir_retry_delay) + + if not scratch_dir_created: + dir_list = glob.glob(f"*_jid_{job_id}*") + sentry.capture_message(f"Failed to acquire lock for directory creation after multiple attempts.\njob_id: {self.job_id}\ndir_list: {dir_list}") + raise InternalError(f"Failed to acquire lock for directory creation after {scratch_dir_retry_attempts} attempts.", status_code=500) def set_temp_dir(self, session_id, job_id=None, verbose=False): if verbose: @@ -1659,9 +1678,7 @@ def set_config(self): def get_existing_job_ID_path(self, wd): # exist same job_ID, different session ID dir_list = glob.glob(f'*_jid_{self.job_id}') - # print('dirs',dir_list) - if dir_list: - dir_list = [d for d in dir_list if 'aliased' not in d] + dir_list = [d for d in dir_list if 'aliased' not in d] if len(dir_list) == 1: if dir_list[0] != wd: @@ -1670,9 +1687,8 @@ def get_existing_job_ID_path(self, wd): alias_dir = None elif len(dir_list) > 1: - sentry.capture_message(f'Found two non aliased identical job_id, dir_list: {dir_list}') - self.logger.warning(f'Found two non aliased identical job_id, dir_list: {dir_list}') - + sentry.capture_message(f'Found two or more non aliased identical job_id, dir_list: {dir_list}') + self.logger.warning(f'Found two or more non aliased identical job_id, dir_list: {dir_list}') raise InternalError("We have encountered an internal error! " "Our team is notified and is working on it. We are sorry! 
" "When we find a solution we will try to reach you", @@ -1683,6 +1699,7 @@ def get_existing_job_ID_path(self, wd): return alias_dir + def get_file_mtime(self, file): return os.path.getmtime(file) diff --git a/cdci_data_analysis/pytest_fixtures.py b/cdci_data_analysis/pytest_fixtures.py index 85649a536..74b91092f 100644 --- a/cdci_data_analysis/pytest_fixtures.py +++ b/cdci_data_analysis/pytest_fixtures.py @@ -604,13 +604,55 @@ def dispatcher_test_conf_with_gallery_fn(dispatcher_test_conf_fn): '\n product_gallery_url: "http://cdciweb02.astro.unige.ch/mmoda/galleryd"' f'\n product_gallery_secret_key: "{os.getenv("DISPATCHER_PRODUCT_GALLERY_SECRET_KEY", "secret_key")}"' '\n product_gallery_timezone: "Europe/Zurich"' - '\n name_resolver_url: "https://resolver-prod.obsuks1.unige.ch/api/v1.1/byname/{}"' + '\n local_name_resolver_url: "https://resolver-prod.obsuks1.unige.ch/api/v1.1/byname/{}"' + '\n external_name_resolver_url: "http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/-oxp/NSV?{}"' '\n entities_portal_url: "http://cdsportal.u-strasbg.fr/?target={}"' '\n converttime_revnum_service_url: "https://www.astro.unige.ch/mmoda/dispatch-data/gw/timesystem/api/v1.0/converttime/UTC/{}/REVNUM"') yield fn +@pytest.fixture +def dispatcher_test_conf_with_gallery_invalid_local_resolver_fn(dispatcher_test_conf_fn): + fn = "test-dispatcher-conf-with-gallery.yaml" + + with open(fn, "w") as f: + with open(dispatcher_test_conf_fn) as f_default: + f.write(f_default.read()) + + f.write('\n product_gallery_options:' + '\n product_gallery_url: "http://cdciweb02.astro.unige.ch/mmoda/galleryd"' + f'\n product_gallery_secret_key: "{os.getenv("DISPATCHER_PRODUCT_GALLERY_SECRET_KEY", "secret_key")}"' + '\n product_gallery_timezone: "Europe/Zurich"' + '\n local_name_resolver_url: "http://invalid_url/"' + '\n external_name_resolver_url: "http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/-oxp/NSV?{}"' + '\n entities_portal_url: "http://cdsportal.u-strasbg.fr/?target={}"' + '\n 
converttime_revnum_service_url: "https://www.astro.unige.ch/mmoda/dispatch-data/gw/timesystem/api/v1.0/converttime/UTC/{}/REVNUM"') + + yield fn + + +@pytest.fixture +def dispatcher_test_conf_with_vo_options_fn(dispatcher_test_conf_fn): + fn = "test-dispatcher-conf-with-vo-options.yaml" + + with open(fn, "w") as f: + with open(dispatcher_test_conf_fn) as f_default: + f.write(f_default.read()) + + f.write('\n vo_options:' + '\n vo_mysql_pg_host: "localhost"' + '\n vo_mysql_pg_user: "user"' + '\n vo_mysql_pg_password: "password"' + '\n vo_mysql_pg_db: "database"' + '\n vo_psql_pg_host: "localhost"' + '\n vo_psql_pg_user: "user"' + '\n vo_psql_pg_password: "password"' + '\n vo_psql_pg_db: "database"') + + yield fn + + @pytest.fixture def dispatcher_test_conf_with_matrix_options_fn(dispatcher_test_conf_fn): fn = "test-dispatcher-conf-with-matrix-options.yaml" @@ -708,10 +750,21 @@ def dispatcher_test_conf_with_gallery(dispatcher_test_conf_with_gallery_fn): yield yaml.load(open(dispatcher_test_conf_with_gallery_fn), Loader=yaml.SafeLoader)['dispatcher'] +@pytest.fixture +def dispatcher_test_conf_with_gallery_invalid_local_resolver(dispatcher_test_conf_with_gallery_invalid_local_resolver_fn): + yield yaml.load(open(dispatcher_test_conf_with_gallery_invalid_local_resolver_fn), Loader=yaml.SafeLoader)['dispatcher'] + + +@pytest.fixture +def dispatcher_test_conf_with_vo_options(dispatcher_test_conf_with_vo_options_fn): + yield yaml.load(open(dispatcher_test_conf_with_vo_options_fn), Loader=yaml.SafeLoader)['dispatcher'] + + @pytest.fixture def dispatcher_test_conf_with_matrix_options(dispatcher_test_conf_with_matrix_options_fn): yield yaml.load(open(dispatcher_test_conf_with_matrix_options_fn), Loader=yaml.SafeLoader)['dispatcher'] + @pytest.fixture def dispatcher_test_conf_with_gallery_no_resolver(dispatcher_test_conf_with_gallery_no_resolver_fn): yield yaml.load(open(dispatcher_test_conf_with_gallery_no_resolver_fn), Loader=yaml.SafeLoader)['dispatcher'] @@ -1120,6 
+1173,20 @@ def dispatcher_live_fixture_with_gallery_no_resolver(pytestconfig, dispatcher_te os.kill(pid, signal.SIGINT) +@pytest.fixture +def dispatcher_live_fixture_with_gallery_invalid_local_resolver(pytestconfig, dispatcher_test_conf_with_gallery_invalid_local_resolver_fn, + dispatcher_debug): + dispatcher_state = start_dispatcher(pytestconfig.rootdir, dispatcher_test_conf_with_gallery_invalid_local_resolver_fn) + + service = dispatcher_state['url'] + pid = dispatcher_state['pid'] + + yield service + + kill_child_processes(pid, signal.SIGINT) + os.kill(pid, signal.SIGINT) + + @pytest.fixture def dispatcher_live_fixture_no_products_url(pytestconfig, dispatcher_test_conf_no_products_url_fn, dispatcher_debug): dispatcher_state = start_dispatcher(pytestconfig.rootdir, dispatcher_test_conf_no_products_url_fn) diff --git a/tests/conftest.py b/tests/conftest.py index 1f6659bdc..25e5194bc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,7 @@ dispatcher_live_fixture_no_debug_mode, dispatcher_live_fixture_with_gallery, dispatcher_live_fixture_with_gallery_no_resolver, + dispatcher_live_fixture_with_gallery_invalid_local_resolver, dispatcher_long_living_fixture, gunicorn_dispatcher_long_living_fixture, dispatcher_long_living_fixture_with_matrix_options, @@ -18,9 +19,13 @@ gunicorn_dispatcher_long_living_fixture_with_matrix_options, dispatcher_test_conf, dispatcher_test_conf_with_gallery, + dispatcher_test_conf_with_vo_options, dispatcher_test_conf_with_gallery_no_resolver, + dispatcher_test_conf_with_gallery_invalid_local_resolver, dispatcher_test_conf_empty_sentry_fn, dispatcher_test_conf_with_gallery_fn, + dispatcher_test_conf_with_gallery_invalid_local_resolver_fn, + dispatcher_test_conf_with_vo_options_fn, dispatcher_test_conf_with_gallery_no_resolver_fn, dispatcher_live_fixture_with_external_products_url, dispatcher_live_fixture_with_default_route_products_url, diff --git a/tests/test_configurer.py b/tests/test_configurer.py index 
c5f81c1c7..7f77dc3a4 100644 --- a/tests/test_configurer.py +++ b/tests/test_configurer.py @@ -98,7 +98,9 @@ def test_confenv_legacy_plugin_keys(caplog): def test_config_no_resolver_urls(dispatcher_test_conf_with_gallery_no_resolver_fn): conf = ConfigEnv.from_conf_file(dispatcher_test_conf_with_gallery_no_resolver_fn) - assert hasattr(conf, 'name_resolver_url') - assert conf.name_resolver_url is not None + assert hasattr(conf, 'local_name_resolver_url') + assert conf.local_name_resolver_url is not None assert hasattr(conf, 'entities_portal_url') assert conf.entities_portal_url is not None + assert hasattr(conf, 'external_name_resolver_url') + assert conf.external_name_resolver_url is not None diff --git a/tests/test_server_basic.py b/tests/test_server_basic.py index 25af7d2fb..192d7141e 100644 --- a/tests/test_server_basic.py +++ b/tests/test_server_basic.py @@ -2,6 +2,7 @@ import shutil import urllib import io + import requests import time import uuid @@ -11,6 +12,7 @@ import jwt import glob import pytest +import fcntl from datetime import datetime, timedelta from dateutil import parser, tz from functools import reduce @@ -320,6 +322,55 @@ def test_error_two_scratch_dir_same_job_id(dispatcher_live_fixture): os.rmdir(fake_scratch_dir) +@pytest.mark.not_safe_parallel +@pytest.mark.fast +def test_scratch_dir_creation_lock_error(dispatcher_live_fixture): + DispatcherJobState.remove_scratch_folders() + server = dispatcher_live_fixture + logger.info("constructed server: %s", server) + + encoded_token = jwt.encode(default_token_payload, secret_key, algorithm='HS256') + # issuing a request each, with the same set of parameters + params = dict( + query_status="new", + query_type="Real", + instrument="empty-async", + product_type="dummy", + token=encoded_token + ) + DataServerQuery.set_status('submitted') + # let's generate a fake scratch dir + jdata = ask(server, + params, + expected_query_status=["submitted"], + max_time_s=50, + ) + + job_id = 
jdata['job_monitor']['job_id'] + session_id = jdata['session_id'] + fake_scratch_dir = f'scratch_sid_01234567890_jid_{job_id}' + os.makedirs(fake_scratch_dir) + + params['job_id'] = job_id + params['session_id'] = session_id + + lock_file = f".lock_{job_id}" + + with open(lock_file, 'w') as f_lock: + fcntl.flock(f_lock, fcntl.LOCK_EX) + + jdata = ask(server, + params, + expected_status_code=500, + expected_query_status=None, + ) + scratch_dir_retry_attempts = 5 + assert jdata['error'] == f"InternalError():Failed to acquire lock for directory creation after {scratch_dir_retry_attempts} attempts." + assert jdata['error_message'] == f"Failed to acquire lock for directory creation after {scratch_dir_retry_attempts} attempts." + os.rmdir(fake_scratch_dir) + os.remove(lock_file) + + @pytest.mark.fast def test_same_request_different_users(dispatcher_live_fixture): server = dispatcher_live_fixture @@ -2678,7 +2729,7 @@ def test_source_resolver(dispatcher_live_fixture_with_gallery, dispatcher_test_c # the name resolver replaces automatically underscores with spaces in the returned name assert resolved_obj['name'] == source_to_resolve - assert resolved_obj['message'] == f'{source_to_resolve} could not be resolved' + assert resolved_obj['message'].startswith(f'{source_to_resolve} could not be resolved') else: assert 'name' in resolved_obj assert 'DEC' in resolved_obj @@ -2686,12 +2737,61 @@ def test_source_resolver(dispatcher_live_fixture_with_gallery, dispatcher_test_c assert 'entity_portal_link' in resolved_obj assert 'object_ids' in resolved_obj assert 'object_type' in resolved_obj + assert 'message' in resolved_obj assert resolved_obj['name'] == source_to_resolve.replace('_', ' ') assert resolved_obj['entity_portal_link'] == dispatcher_test_conf_with_gallery["product_gallery_options"]["entities_portal_url"]\ .format(urllib.parse.quote(source_to_resolve.strip())) +@pytest.mark.test_drupal +@pytest.mark.parametrize("source_to_resolve", ['Mrk 421', 'Mrk_421', 'GX 1+4', 'fake 
object', None]) +def test_source_resolver_invalid_local_resolver(dispatcher_live_fixture_with_gallery_invalid_local_resolver, dispatcher_test_conf_with_gallery_invalid_local_resolver, source_to_resolve): + server = dispatcher_live_fixture_with_gallery_invalid_local_resolver + + logger.info("constructed server: %s", server) + + # let's generate a valid token + token_payload = { + **default_token_payload, + "roles": "general, gallery contributor", + } + encoded_token = jwt.encode(token_payload, secret_key, algorithm='HS256') + + params = {'name': source_to_resolve, + 'token': encoded_token} + + c = requests.get(os.path.join(server, "resolve_name"), + params={**params} + ) + + assert c.status_code == 200 + resolved_obj = c.json() + print('Resolved object returned: ', resolved_obj) + + if source_to_resolve is None: + assert resolved_obj == {} + elif source_to_resolve == 'fake object': + assert 'name' in resolved_obj + assert 'message' in resolved_obj + + # the name resolver replaces automatically underscores with spaces in the returned name + assert resolved_obj['name'] == source_to_resolve + assert resolved_obj['message'].startswith(f'{source_to_resolve} could not be resolved') + else: + assert 'name' in resolved_obj + assert 'DEC' in resolved_obj + assert 'RA' in resolved_obj + assert 'entity_portal_link' in resolved_obj + assert 'object_ids' in resolved_obj + assert 'object_type' in resolved_obj + assert 'message' in resolved_obj + + assert resolved_obj['name'] == source_to_resolve.replace('_', ' ') + assert resolved_obj['entity_portal_link'] == dispatcher_test_conf_with_gallery_invalid_local_resolver["product_gallery_options"]["entities_portal_url"]\ + .format(urllib.parse.quote(source_to_resolve.strip())) + + @pytest.mark.test_drupal @pytest.mark.parametrize("type_group", ['instruments', 'Instruments', 'products', 'sources', 'aaaaaa', '', None]) @pytest.mark.parametrize("parent", ['isgri', 'production', 'all', 'aaaaaa', '', None])