From 92220bad86cf5513de2a0d71749053e64346a63c Mon Sep 17 00:00:00 2001 From: sam Date: Fri, 22 Nov 2019 22:39:21 +0100 Subject: [PATCH] wip --- conftest.py | 93 ++++++++++++++++++++- renku/cli/__init__.py | 2 +- renku/service/config.py | 1 + renku/service/serializers/cache.py | 12 ++- renku/service/utils/__init__.py | 1 + renku/service/views/cache.py | 17 ++-- renku/service/views/datasets.py | 8 +- renku/service/views/decorators.py | 33 +++++++- tests/service/test_cache_views.py | 125 ++++++++++++++++++++-------- tests/service/test_dataset_views.py | 71 ++++++++-------- tests/service/test_exceptions.py | 60 +++++++++++++ 11 files changed, 335 insertions(+), 88 deletions(-) create mode 100644 tests/service/test_exceptions.py diff --git a/conftest.py b/conftest.py index 6faf45a284..483d2b6dbb 100644 --- a/conftest.py +++ b/conftest.py @@ -580,16 +580,22 @@ def svc_client(mock_redis): def svc_client_with_repo(svc_client, mock_redis): """Renku service remote repository.""" remote_url = 'https://renkulab.io/gitlab/contact/integration-tests.git' - headers = {'Authorization': 'Bearer b4b4de0eda0f471ab82702bd5c367fa7'} + headers = { + 'Content-Type': 'application/json', + 'accept': 'application/json', + 'Authorization': 'Bearer b4b4de0eda0f471ab82702bd5c367fa7', + } - params = { + payload = { 'git_url': remote_url, 'git_username': 'contact', 'git_access_token': 'EcfPJvEqjJepyu6XyqKZ', } response = svc_client.post( - '/cache/project-clone', data=params, headers=headers + '/cache/project-clone', + data=json.dumps(payload), + headers=headers, ) assert response @@ -599,3 +605,84 @@ def svc_client_with_repo(svc_client, mock_redis): assert isinstance(uuid.UUID(project_id), uuid.UUID) yield svc_client, headers, project_id + + +@pytest.fixture( + params=[ + { + 'url': '/cache/files-list', + 'allowed_method': 'GET', + 'headers': { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + }, + { + 'url': '/cache/files-upload', + 'allowed_method': 'POST', + 'headers': {} + }, + { + 'url': '/cache/project-clone', + 'allowed_method': 'POST', + 'headers': { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + }, + { + 'url': '/cache/project-list', + 'allowed_method': 'GET', + 'headers': { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + }, + { + 'url': '/datasets/add', + 'allowed_method': 'POST', + 'headers': { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + }, + { + 'url': '/datasets/create', + 'allowed_method': 'POST', + 'headers': { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + }, + { + 'url': '/datasets/files-list', + 'allowed_method': 'GET', + 'headers': { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + }, + { + 'url': '/datasets/list', + 'allowed_method': 'GET', + 'headers': { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + }, + ] +) +def service_allowed_endpoint(request, svc_client, mock_redis): + """Ensure allowed methods and correct headers.""" + methods = { + 'GET': svc_client.get, + 'POST': svc_client.post, + 'HEAD': svc_client.head, + 'PUT': svc_client.put, + 'DELETE': svc_client.delete, + 'OPTIONS': svc_client.options, + 'TRACE': svc_client.trace, + 'PATCH': svc_client.patch, + } + + yield methods, request.param, svc_client diff --git a/renku/cli/__init__.py b/renku/cli/__init__.py index 3a33d746d6..e862182ef5 100644 --- a/renku/cli/__init__.py +++ b/renku/cli/__init__.py @@ -90,7 +90,7 @@ option_use_external_storage 
from renku.core.commands.version import check_version, print_version from renku.core.management.client import LocalClient -from renku.core.management.config import ConfigManagerMixin, RENKU_HOME +from renku.core.management.config import RENKU_HOME, ConfigManagerMixin from renku.core.management.repository import default_path #: Monkeypatch Click application. diff --git a/renku/service/config.py b/renku/service/config.py index 1231cb4b51..d43b352a80 100644 --- a/renku/service/config.py +++ b/renku/service/config.py @@ -26,6 +26,7 @@ RENKU_EXCEPTION_ERROR_CODE = -32100 REDIS_EXCEPTION_ERROR_CODE = -32200 +INVALID_HEADERS_ERROR_CODE = -32601 INVALID_PARAMS_ERROR_CODE = -32602 INTERNAL_FAILURE_ERROR_CODE = -32603 diff --git a/renku/service/serializers/cache.py b/renku/service/serializers/cache.py index df3b81e12d..ef339fea7e 100644 --- a/renku/service/serializers/cache.py +++ b/renku/service/serializers/cache.py @@ -95,14 +95,18 @@ class FileListResponseRPC(JsonRPCResponse): class ProjectCloneRequest(Schema): """Request schema for project clone.""" - project_id = fields.String(missing=lambda: uuid.uuid4().hex) - name = fields.String(required=True) - owner = fields.String(required=True) - git_url = fields.String(required=True) git_username = fields.String(required=True) git_access_token = fields.String(required=True) + +class ProjectCloneDetails(ProjectCloneRequest): + """Details schema for project clone.""" + + project_id = fields.String(missing=lambda: uuid.uuid4().hex) + name = fields.String(required=True) + owner = fields.String(required=True) + @validates('git_url') def validate_git_url(self, value): """Validates git url.""" diff --git a/renku/service/utils/__init__.py b/renku/service/utils/__init__.py index 21eb0364ec..23bb0efc4f 100644 --- a/renku/service/utils/__init__.py +++ b/renku/service/utils/__init__.py @@ -17,6 +17,7 @@ # limitations under the License. """Renku service utility functions.""" from git import Repo + from renku.service.config import CACHE_PROJECTS_PATH, CACHE_UPLOADS_PATH diff --git a/renku/service/views/cache.py b/renku/service/views/cache.py index 1fc04c330e..377555c31b 100644 --- a/renku/service/views/cache.py +++ b/renku/service/views/cache.py @@ -31,11 +31,11 @@ INVALID_PARAMS_ERROR_CODE, SUPPORTED_ARCHIVES from renku.service.serializers.cache import FileListResponse, \ FileListResponseRPC, FileUploadDetails, FileUploadRequest, \ - FileUploadResponse, FileUploadResponseRPC, ProjectCloneRequest, \ - ProjectCloneResponse, ProjectCloneResponseRPC, ProjectListResponse, \ - ProjectListResponseRPC, extract_file + FileUploadResponse, FileUploadResponseRPC, ProjectCloneDetails, \ + ProjectCloneRequest, ProjectCloneResponse, ProjectCloneResponseRPC, \ + ProjectListResponse, ProjectListResponseRPC, extract_file from renku.service.utils import make_file_path, make_project_path -from renku.service.views.decorators import handle_base_except, \ +from renku.service.views.decorators import accepts_json, handle_base_except, \ handle_git_except, handle_renku_except, handle_validation_except, \ header_doc, requires_cache, requires_identity @@ -156,7 +156,10 @@ def upload_file_view(user, cache): @use_kwargs(ProjectCloneRequest) @marshal_with(ProjectCloneResponseRPC) -@header_doc('Clone a remote project.', tags=(CACHE_BLUEPRINT_TAG, )) +@header_doc( + 'Clone a remote project. 
If the project is cached already, ' + 'new clone operation will override the old cache state.', + tags=(CACHE_BLUEPRINT_TAG, )) @cache_blueprint.route( '/cache/project-clone', methods=['POST'], @@ -168,10 +171,10 @@ def upload_file_view(user, cache): @handle_validation_except @requires_cache @requires_identity +@accepts_json def project_clone(user, cache): """Clone a remote repository.""" - ctx = ProjectCloneRequest().load(dict(request.form)) - + ctx = ProjectCloneDetails().load(request.json) local_path = make_project_path(user, ctx) if local_path.exists(): shutil.rmtree(local_path) diff --git a/renku/service/views/datasets.py b/renku/service/views/datasets.py index 61d3565964..25f7176fa6 100644 --- a/renku/service/views/datasets.py +++ b/renku/service/views/datasets.py @@ -34,7 +34,7 @@ DatasetFilesListResponseRPC, DatasetListRequest, DatasetListResponse, \ DatasetListResponseRPC from renku.service.utils import make_file_path, make_project_path, repo_sync -from renku.service.views.decorators import handle_base_except, \ +from renku.service.views.decorators import accepts_json, handle_base_except, \ handle_git_except, handle_renku_except, handle_validation_except, \ header_doc, requires_cache, requires_identity @@ -137,11 +137,12 @@ def list_dataset_files_view(user, cache): @handle_git_except @handle_renku_except @handle_validation_except +@accepts_json @requires_cache @requires_identity def add_file_to_dataset_view(user, cache): """Add uploaded file to cloned repository.""" - ctx = DatasetAddRequest().load(request.form) + ctx = DatasetAddRequest().load(request.json) project = cache.get_project(user, ctx['project_id']) file = cache.get_file(user, ctx['file_id']) @@ -196,11 +197,12 @@ def add_file_to_dataset_view(user, cache): @handle_git_except @handle_renku_except @handle_validation_except +@accepts_json @requires_cache @requires_identity def create_dataset_view(user, cache): """Create a new dataset in a project.""" - ctx = DatasetCreateRequest().load(request.form) + ctx = DatasetCreateRequest().load(request.json) project = cache.get_project(user, ctx['project_id']) project_path = make_project_path(user, project) diff --git a/renku/service/views/decorators.py b/renku/service/views/decorators.py index 62013993d8..8313ca4370 100644 --- a/renku/service/views/decorators.py +++ b/renku/service/views/decorators.py @@ -28,8 +28,8 @@ from renku.core.errors import RenkuException from renku.service.config import GIT_ACCESS_DENIED_ERROR_CODE, \ GIT_UNKNOWN_ERROR_CODE, INTERNAL_FAILURE_ERROR_CODE, \ - INVALID_PARAMS_ERROR_CODE, REDIS_EXCEPTION_ERROR_CODE, \ - RENKU_EXCEPTION_ERROR_CODE + INVALID_HEADERS_ERROR_CODE, INVALID_PARAMS_ERROR_CODE, \ + REDIS_EXCEPTION_ERROR_CODE, RENKU_EXCEPTION_ERROR_CODE def requires_identity(f): @@ -42,7 +42,7 @@ def decorated_function(*args, **kws): if user and not user[0]: return jsonify( error={ - 'code': INVALID_PARAMS_ERROR_CODE, + 'code': INVALID_HEADERS_ERROR_CODE, 'reason': 'user identification is missing' } ) @@ -146,6 +146,33 @@ def decorated_function(*args, **kwargs): return decorated_function +def accepts_json(f): + """Wrapper which ensures only JSON payload can be in request.""" + # nowa + @wraps(f) + def decorated_function(*args, **kwargs): + """Represents decorated function.""" + if 'Content-Type' not in request.headers: + return jsonify( + error={ + 'code': INVALID_HEADERS_ERROR_CODE, + 'reason': 'invalid request headers' + } + ) + + header_check = request.headers['Content-Type'] == 'application/json' + + if not request.is_json or not header_check: + 
return jsonify(error={ + 'code': INVALID_HEADERS_ERROR_CODE, + 'reason': 'invalid request payload' + }) + + return f(*args, **kwargs) + + return decorated_function + + def handle_base_except(f): """Wrapper which handles base exceptions.""" # nowa diff --git a/tests/service/test_cache_views.py b/tests/service/test_cache_views.py index 94f52eb163..b9261ea23a 100644 --- a/tests/service/test_cache_views.py +++ b/tests/service/test_cache_views.py @@ -17,19 +17,27 @@ # limitations under the License. """Renku service cache view tests.""" import io +import json import uuid import pytest from renku.core.models.git import GitURL from renku.service.config import GIT_ACCESS_DENIED_ERROR_CODE, \ - INVALID_PARAMS_ERROR_CODE + INVALID_HEADERS_ERROR_CODE, INVALID_PARAMS_ERROR_CODE @pytest.mark.service def test_serve_api_spec(svc_client): """Check serving of service spec.""" - response = svc_client.get('/api/v1/spec') + headers = { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + response = svc_client.get( + '/api/v1/spec', + headers=headers + ) assert 0 != len(response.json.keys()) assert 200 == response.status_code @@ -38,7 +46,11 @@ def test_serve_api_spec(svc_client): @pytest.mark.service def test_list_upload_files_all(svc_client): """Check list uploaded files view.""" - headers_user = {'Authorization': 'bearer user'} + headers_user = { + 'Content-Type': 'application/json', + 'accept': 'application/json', + 'Authorization': 'bearer user' + } response = svc_client.get('/cache/files-list', headers=headers_user) assert {'result'} == set(response.json.keys()) @@ -50,18 +62,27 @@ def test_list_upload_files_all(svc_client): @pytest.mark.service def test_list_upload_files_all_no_auth(svc_client): """Check error response on list uploaded files view.""" - response = svc_client.get('/cache/files-list') + headers = { + 'Content-Type': 'application/json', + 'accept': 'application/json', + } + response = svc_client.get( + '/cache/files-list', + headers=headers, + ) assert 200 == response.status_code assert {'error'} == set(response.json.keys()) - assert INVALID_PARAMS_ERROR_CODE == response.json['error']['code'] + assert INVALID_HEADERS_ERROR_CODE == response.json['error']['code'] @pytest.mark.service def test_file_upload(svc_client): """Check successful file upload.""" - headers_user = {'Authorization': 'bearer {0}'.format(uuid.uuid4().hex)} + headers_user = { + 'Authorization': 'bearer {0}'.format(uuid.uuid4().hex) + } response = svc_client.post( '/cache/files-upload', @@ -81,7 +102,9 @@ def test_file_upload(svc_client): @pytest.mark.service def test_file_upload_override(svc_client): """Check successful file upload.""" - headers_user = {'Authorization': 'bearer {0}'.format(uuid.uuid4().hex)} + headers_user = { + 'Authorization': 'bearer {0}'.format(uuid.uuid4().hex), + } response = svc_client.post( '/cache/files-upload', @@ -131,7 +154,9 @@ def test_file_upload_override(svc_client): @pytest.mark.service def test_file_upload_same_file(svc_client): """Check successful file upload with same file uploaded twice.""" - headers_user1 = {'Authorization': 'bearer {0}'.format(uuid.uuid4().hex)} + headers_user1 = { + 'Authorization': 'bearer {0}'.format(uuid.uuid4().hex), + } response = svc_client.post( '/cache/files-upload', data=dict(file=(io.BytesIO(b'this is a test'), 'datafile.txt'), ), @@ -172,14 +197,18 @@ def test_file_upload_no_auth(svc_client): assert 200 == response.status_code assert {'error'} == set(response.json.keys()) - assert INVALID_PARAMS_ERROR_CODE == 
response.json['error']['code'] + assert INVALID_HEADERS_ERROR_CODE == response.json['error']['code'] @pytest.mark.service def test_file_upload_with_users(svc_client): """Check successful file upload and listing based on user auth header.""" - headers_user1 = {'Authorization': 'bearer {0}'.format(uuid.uuid4().hex)} - headers_user2 = {'Authorization': 'bearer {0}'.format(uuid.uuid4().hex)} + headers_user1 = { + 'Authorization': 'bearer {0}'.format(uuid.uuid4().hex), + } + headers_user2 = { + 'Authorization': 'bearer {0}'.format(uuid.uuid4().hex) + } response = svc_client.post( '/cache/files-upload', @@ -202,7 +231,10 @@ def test_file_upload_with_users(svc_client): assert response assert {'result'} == set(response.json.keys()) - response = svc_client.get('/cache/files-list', headers=headers_user1) + response = svc_client.get( + '/cache/files-list', + headers=headers_user1 + ) assert response @@ -220,22 +252,31 @@ def test_clone_projects_no_auth(svc_client): """Check error on cloning of remote repository.""" remote_url = 'https://renkulab.io/gitlab/contact/integration-tests.git' - params = { + payload = { 'git_url': remote_url, 'git_username': 'contact', 'git_access_token': 'notatoken', } - response = svc_client.post('/cache/project-clone', data=params) + response = svc_client.post( + '/cache/project-clone', + data=json.dumps(payload) + ) assert {'error'} == set(response.json.keys()) - assert INVALID_PARAMS_ERROR_CODE == response.json['error']['code'] + assert INVALID_HEADERS_ERROR_CODE == response.json['error']['code'] assert 'user identification is missing' == response.json['error']['reason'] - headers = {'Authorization': 'bearer {0}'.format(uuid.uuid4().hex)} + headers = { + 'Content-Type': 'application/json', + 'accept': 'application/json', + 'Authorization': 'bearer {0}'.format(uuid.uuid4().hex), + } response = svc_client.post( - '/cache/project-clone', data=params, headers=headers + '/cache/project-clone', + data=json.dumps(payload), + headers=headers ) assert {'error'} == set(response.json.keys()) @@ -249,16 +290,22 @@ def test_clone_projects_no_auth(svc_client): def test_clone_projects_with_auth(svc_client): """Check cloning of remote repository.""" remote_url = 'https://renkulab.io/gitlab/contact/integration-tests.git' - headers = {'Authorization': 'bearer {0}'.format(uuid.uuid4().hex)} + headers = { + 'Content-Type': 'application/json', + 'accept': 'application/json', + 'Authorization': 'bearer {0}'.format(uuid.uuid4().hex), + } - params = { + payload = { 'git_username': 'contact', 'git_access_token': 'EcfPJvEqjJepyu6XyqKZ', 'git_url': remote_url } response = svc_client.post( - '/cache/project-clone', data=params, headers=headers + '/cache/project-clone', + data=json.dumps(payload), + headers=headers ) assert response @@ -270,16 +317,22 @@ def test_clone_projects_with_auth(svc_client): def test_clone_projects_list_view_errors(svc_client): """Check cache state of cloned projects with no headers.""" remote_url = 'https://renkulab.io/gitlab/contact/integration-tests.git' - headers = {'Authorization': 'bearer {0}'.format(uuid.uuid4().hex)} + headers = { + 'Content-Type': 'application/json', + 'accept': 'application/json', + 'Authorization': 'bearer {0}'.format(uuid.uuid4().hex), + } - params = { + payload = { 'git_url': remote_url, 'git_username': 'contact', 'git_access_token': 'EcfPJvEqjJepyu6XyqKZ', } response = svc_client.post( - '/cache/project-clone', data=params, headers=headers + '/cache/project-clone', + data=json.dumps(payload), + headers=headers ) assert response assert 
{'result'} == set(response.json.keys()) @@ -295,7 +348,7 @@ def test_clone_projects_list_view_errors(svc_client): assert response assert {'error'} == set(response.json.keys()) - assert INVALID_PARAMS_ERROR_CODE == response.json['error']['code'] + assert INVALID_HEADERS_ERROR_CODE == response.json['error']['code'] response = svc_client.get('/cache/project-list', headers=headers) assert response @@ -312,16 +365,22 @@ def test_clone_projects_list_view_errors(svc_client): def test_clone_projects_invalid_auth(svc_client): """Check cache state of cloned projects with invalid auth.""" remote_url = 'https://renkulab.io/gitlab/contact/integration-tests.git' - headers = {'Authorization': 'bearer b4b4de0eda0f471ab82702bd5c367fa7'} + headers = { + 'Content-Type': 'application/json', + 'accept': 'application/json', + 'Authorization': 'bearer b4b4de0eda0f471ab82702bd5c367fa7', + } - params = { + payload = { 'git_url': remote_url, 'git_username': 'notsam', 'git_access_token': 'notvalidtoken', } response = svc_client.post( - '/cache/project-clone', data=params, headers=headers + '/cache/project-clone', + data=json.dumps(payload), + headers=headers, ) assert response @@ -335,7 +394,7 @@ def test_clone_projects_invalid_auth(svc_client): assert response assert {'error'} == set(response.json.keys()) - assert INVALID_PARAMS_ERROR_CODE == response.json['error']['code'] + assert INVALID_HEADERS_ERROR_CODE == response.json['error']['code'] response = svc_client.get('/cache/project-list', headers=headers) assert response @@ -357,7 +416,7 @@ def test_upload_zip_unpack_archive(datapack_zip, svc_client_with_repo): 'unpack_archive': True, 'override_existing': True, }, - headers=headers + headers={'Authorization': headers['Authorization']} ) assert response @@ -385,7 +444,7 @@ def test_upload_zip_archive(datapack_zip, svc_client_with_repo): 'unpack_archive': False, 'override_existing': True, }, - headers=headers + headers={'Authorization': headers['Authorization']} ) assert response @@ -413,7 +472,7 @@ def test_upload_tar_unpack_archive(datapack_tar, svc_client_with_repo): 'unpack_archive': True, 'override_existing': True, }, - headers=headers + headers={'Authorization': headers['Authorization']} ) assert response @@ -441,7 +500,7 @@ def test_upload_tar_archive(datapack_tar, svc_client_with_repo): 'unpack_archive': False, 'override_existing': True, }, - headers=headers + headers={'Authorization': headers['Authorization']} ) assert response @@ -469,7 +528,7 @@ def test_field_upload_resp_fields(datapack_tar, svc_client_with_repo): 'unpack_archive': True, 'override_existing': True, }, - headers=headers + headers={'Authorization': headers['Authorization']} ) assert response diff --git a/tests/service/test_dataset_views.py b/tests/service/test_dataset_views.py index db0b8ff906..fc0b341719 100644 --- a/tests/service/test_dataset_views.py +++ b/tests/service/test_dataset_views.py @@ -17,12 +17,13 @@ # limitations under the License. 
"""Renku service dataset view tests.""" import io +import json import uuid import pytest -from renku.service.config import INVALID_PARAMS_ERROR_CODE, \ - RENKU_EXCEPTION_ERROR_CODE +from renku.service.config import INVALID_HEADERS_ERROR_CODE, \ + INVALID_PARAMS_ERROR_CODE, RENKU_EXCEPTION_ERROR_CODE @pytest.mark.service @@ -31,14 +32,14 @@ def test_create_dataset_view(svc_client_with_repo): """Create new dataset successfully.""" svc_client, headers, project_id = svc_client_with_repo - params = { + payload = { 'project_id': project_id, 'dataset_name': '{0}'.format(uuid.uuid4().hex), } response = svc_client.post( '/datasets/create', - data=params, + data=json.dumps(payload), headers=headers, ) @@ -46,7 +47,7 @@ def test_create_dataset_view(svc_client_with_repo): assert {'result'} == set(response.json.keys()) assert {'dataset_name'} == set(response.json['result'].keys()) - assert params['dataset_name'] == response.json['result']['dataset_name'] + assert payload['dataset_name'] == response.json['result']['dataset_name'] @pytest.mark.service @@ -55,14 +56,14 @@ def test_create_dataset_view_dataset_exists(svc_client_with_repo): """Create new dataset which already exists.""" svc_client, headers, project_id = svc_client_with_repo - params = { + payload = { 'project_id': project_id, 'dataset_name': 'my-dataset', } response = svc_client.post( '/datasets/create', - data=params, + data=json.dumps(payload), headers=headers, ) @@ -79,7 +80,7 @@ def test_create_dataset_view_unknown_param(svc_client_with_repo): """Create new dataset by specifying unknown parameters.""" svc_client, headers, project_id = svc_client_with_repo - params = { + payload = { 'project_id': project_id, 'dataset_name': 'my-dataset', 'remote_name': 'origin' @@ -87,7 +88,7 @@ def test_create_dataset_view_unknown_param(svc_client_with_repo): response = svc_client.post( '/datasets/create', - data=params, + data=json.dumps(payload), headers=headers, ) @@ -102,9 +103,9 @@ def test_create_dataset_view_unknown_param(svc_client_with_repo): @pytest.mark.integration def test_create_dataset_with_no_identity(svc_client_with_repo): """Create new dataset with no identification provided.""" - svc_client, _, project_id = svc_client_with_repo + svc_client, headers, project_id = svc_client_with_repo - params = { + payload = { 'project_id': project_id, 'dataset_name': 'my-dataset', 'remote_name': 'origin', @@ -112,14 +113,15 @@ def test_create_dataset_with_no_identity(svc_client_with_repo): response = svc_client.post( '/datasets/create', - data=params, + data=json.dumps(payload), + headers={'Content-Type': headers['Content-Type']} # no user identity, expect error ) assert response assert {'error'} == response.json.keys() - assert INVALID_PARAMS_ERROR_CODE == response.json['error']['code'] + assert INVALID_HEADERS_ERROR_CODE == response.json['error']['code'] assert 'user identification is missing' == response.json['error']['reason'] @@ -128,7 +130,7 @@ def test_create_dataset_with_no_identity(svc_client_with_repo): def test_add_file_view_with_no_identity(svc_client_with_repo): """Check identity error raise in dataset add.""" svc_client, headers, project_id = svc_client_with_repo - params = { + payload = { 'project_id': project_id, 'dataset_name': 'my-dataset', 'remote_name': 'origin', @@ -136,14 +138,14 @@ def test_add_file_view_with_no_identity(svc_client_with_repo): response = svc_client.post( '/datasets/add', - data=params, + data=json.dumps(payload), + headers={'Content-Type': headers['Content-Type']} # no user identity, expect error ) - assert 
response - assert {'error'} == set(response.json.keys()) - assert INVALID_PARAMS_ERROR_CODE == response.json['error']['code'] + assert {'error'} == set(response.json.keys()) + assert INVALID_HEADERS_ERROR_CODE == response.json['error']['code'] assert 'user identification is missing' == response.json['error']['reason'] @@ -157,7 +159,7 @@ def test_add_file_view(svc_client_with_repo): '/cache/files-upload', data=dict(file=(io.BytesIO(b'this is a test'), 'datafile1.txt'), ), query_string={'override_existing': True}, - headers=headers + headers={'Authorization': headers['Authorization']} ) assert response @@ -167,7 +169,7 @@ def test_add_file_view(svc_client_with_repo): file_id = response.json['result']['files'][0]['file_id'] assert isinstance(uuid.UUID(file_id), uuid.UUID) - params = { + payload = { 'project_id': project_id, 'dataset_name': 'my-dataset', 'file_id': file_id, @@ -175,7 +177,7 @@ def test_add_file_view(svc_client_with_repo): response = svc_client.post( '/datasets/add', - data=params, + data=json.dumps(payload), headers=headers, ) @@ -237,14 +239,14 @@ def test_create_and_list_datasets_view(svc_client_with_repo): """Create and list created dataset.""" svc_client, headers, project_id = svc_client_with_repo - params = { + payload = { 'project_id': project_id, 'dataset_name': '{0}'.format(uuid.uuid4().hex), } response = svc_client.post( '/datasets/create', - data=params, + data=json.dumps(payload), headers=headers, ) @@ -252,7 +254,7 @@ def test_create_and_list_datasets_view(svc_client_with_repo): assert {'result'} == set(response.json.keys()) assert {'dataset_name'} == set(response.json['result'].keys()) - assert params['dataset_name'] == response.json['result']['dataset_name'] + assert payload['dataset_name'] == response.json['result']['dataset_name'] params_list = { 'project_id': project_id, @@ -272,7 +274,7 @@ def test_create_and_list_datasets_view(svc_client_with_repo): assert {'identifier', 'name', 'version', 'created'} == set(response.json['result']['datasets'][0].keys()) - assert params['dataset_name'] in [ + assert payload['dataset_name'] in [ ds['name'] for ds in response.json['result']['datasets'] ] @@ -288,17 +290,18 @@ def test_list_dataset_files(svc_client_with_repo): '/cache/files-upload', data=dict(file=(io.BytesIO(b'this is a test'), file_name), ), query_string={'override_existing': True}, - headers=headers + headers={'Authorization': headers['Authorization']} ) assert response assert 200 == response.status_code + assert {'result'} == set(response.json.keys()) assert 1 == len(response.json['result']['files']) file_id = response.json['result']['files'][0]['file_id'] assert isinstance(uuid.UUID(file_id), uuid.UUID) - params = { + payload = { 'project_id': project_id, 'dataset_name': 'my-dataset', 'file_id': file_id, @@ -306,7 +309,7 @@ def test_list_dataset_files(svc_client_with_repo): response = svc_client.post( '/datasets/add', - data=params, + data=json.dumps(payload), headers=headers, ) @@ -354,7 +357,7 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): 'unpack_archive': True, 'override_existing': True, }, - headers=headers + headers={'Authorization': headers['Authorization']} ) assert response @@ -371,14 +374,14 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): assert file_id file_ = response.json['result']['files'][0] - params = { + payload = { 'project_id': project_id, 'dataset_name': '{0}'.format(uuid.uuid4().hex), } response = svc_client.post( '/datasets/create', - data=params, + data=json.dumps(payload), 
headers=headers, ) @@ -386,9 +389,9 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): assert {'result'} == set(response.json.keys()) assert {'dataset_name'} == set(response.json['result'].keys()) - assert params['dataset_name'] == response.json['result']['dataset_name'] + assert payload['dataset_name'] == response.json['result']['dataset_name'] - params = { + payload = { 'project_id': project_id, 'dataset_name': 'my-dataset', 'file_id': file_['file_id'], @@ -396,7 +399,7 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): response = svc_client.post( '/datasets/add', - data=params, + data=json.dumps(payload), headers=headers, ) diff --git a/tests/service/test_exceptions.py b/tests/service/test_exceptions.py new file mode 100644 index 0000000000..4e165bdca2 --- /dev/null +++ b/tests/service/test_exceptions.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 - Swiss Data Science Center (SDSC) +# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service exception tests for all endpoints.""" +import pytest + +from renku.service.config import INVALID_HEADERS_ERROR_CODE + + +@pytest.mark.service +def test_allowed_methods_exc(service_allowed_endpoint): + """Check allowed methods for every endpoint.""" + methods, request, svc_client = service_allowed_endpoint + + method = request['allowed_method'] + if method == 'GET': # if GET remove sister method HEAD + methods.pop(method) + methods.pop('HEAD') + else: + methods.pop(method) + + for method, fn in methods.items(): + response = fn(request['url']) + assert 405 == response.status_code + + +@pytest.mark.service +def test_auth_headers_exc(service_allowed_endpoint): + """Check correct headers for every endpoint.""" + methods, request, svc_client = service_allowed_endpoint + + method = request['allowed_method'] + if method == 'GET': # if GET remove sister method HEAD + client_method = methods.pop(method) + methods.pop('HEAD') + else: + client_method = methods.pop(method) + + response = client_method( + request['url'], + headers=request['headers'], + ) + + assert 200 == response.status_code + assert INVALID_HEADERS_ERROR_CODE == response.json['error']['code'] + assert 'user identification is missing' == response.json['error']['reason']
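
Call pattern after this change: the JSON POST endpoints (/cache/project-clone, /datasets/create, /datasets/add) now pass through accepts_json, so a request must carry Content-Type: application/json and a serialized body, otherwise the service answers with INVALID_HEADERS_ERROR_CODE. A minimal sketch against the svc_client fixture; the token, username and git URL below are placeholders, not real credentials:

    import json
    import uuid


    def clone_project_sketch(svc_client):
        """Rough illustration of a JSON-only request to /cache/project-clone."""
        headers = {
            # accepts_json rejects the request when this header is missing or
            # anything other than application/json.
            'Content-Type': 'application/json',
            'accept': 'application/json',
            'Authorization': 'bearer {0}'.format(uuid.uuid4().hex),  # placeholder
        }
        payload = {
            'git_url': 'https://renkulab.io/gitlab/contact/integration-tests.git',
            'git_username': 'contact',            # placeholder credentials
            'git_access_token': 'not-a-real-token',
        }
        # A form-encoded body (data=payload) no longer passes header validation;
        # the body has to be serialized JSON.
        return svc_client.post(
            '/cache/project-clone',
            data=json.dumps(payload),
            headers=headers,
        )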
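
/cache/files-upload keeps its multipart form body, so a typical dataset flow mixes the two content types. A rough sketch against the svc_client_with_repo fixture; the file content and dataset name are arbitrary placeholders:

    import io
    import json
    import uuid


    def upload_and_add_sketch(svc_client, headers, project_id):
        """Upload a file as multipart data, then attach it to a dataset via JSON."""
        # File upload stays form based: only the Authorization header is reused,
        # because a JSON Content-Type would not match the multipart body.
        response = svc_client.post(
            '/cache/files-upload',
            data=dict(file=(io.BytesIO(b'sample content'), 'sample.txt'), ),
            query_string={'override_existing': True},
            headers={'Authorization': headers['Authorization']},
        )
        file_id = response.json['result']['files'][0]['file_id']

        # Dataset endpoints are JSON-only after this change.
        dataset_name = uuid.uuid4().hex  # placeholder name
        svc_client.post(
            '/datasets/create',
            data=json.dumps({
                'project_id': project_id,
                'dataset_name': dataset_name,
            }),
            headers=headers,
        )
        return svc_client.post(
            '/datasets/add',
            data=json.dumps({
                'project_id': project_id,
                'dataset_name': dataset_name,
                'file_id': file_id,
            }),
            headers=headers,
        )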