From 26cf3a66cc3258aecedd4d64d9729f8110e4fee4 Mon Sep 17 00:00:00 2001 From: Barak Amar Date: Mon, 18 Sep 2023 10:47:11 +0300 Subject: [PATCH] Remove create metadata and ingest range APIs --- api/swagger.yml | 146 +-------- clients/java/README.md | 7 +- clients/java/api/openapi.yaml | 226 +------------ clients/java/docs/AuthApi.md | 2 +- clients/java/docs/ImportApi.md | 197 +----------- clients/java/docs/ImportLocation.md | 2 +- clients/java/docs/ImportPagination.md | 16 - .../{ImportStatusResp.md => ImportStatus.md} | 2 +- .../java/docs/IngestRangeCreationResponse.md | 14 - clients/java/docs/ObjectStats.md | 2 +- clients/java/docs/StageRangeCreation.md | 17 - clients/java/docs/StagingLocation.md | 4 +- .../java/io/lakefs/clients/api/AuthApi.java | 8 +- .../java/io/lakefs/clients/api/ImportApi.java | 300 +----------------- .../clients/api/model/ImportLocation.java | 4 +- .../clients/api/model/ImportPagination.java | 185 ----------- ...mportStatusResp.java => ImportStatus.java} | 32 +- .../model/IngestRangeCreationResponse.java | 129 -------- .../lakefs/clients/api/model/ObjectStats.java | 4 +- .../clients/api/model/StageRangeCreation.java | 214 ------------- .../clients/api/model/StagingLocation.java | 8 +- .../io/lakefs/clients/api/ImportApiTest.java | 40 +-- .../api/model/ImportPaginationTest.java | 75 ----- ...tusRespTest.java => ImportStatusTest.java} | 12 +- .../IngestRangeCreationResponseTest.java | 61 ---- .../api/model/StageRangeCreationTest.java | 83 ----- clients/python/.openapi-generator/FILES | 15 +- clients/python/README.md | 7 +- clients/python/docs/AuthApi.md | 2 +- clients/python/docs/ImportApi.md | 247 +------------- clients/python/docs/ImportLocation.md | 2 +- clients/python/docs/ImportPagination.md | 15 - .../{ImportStatusResp.md => ImportStatus.md} | 2 +- .../docs/IngestRangeCreationResponse.md | 13 - clients/python/docs/ObjectStats.md | 2 +- clients/python/docs/StageRangeCreation.md | 16 - clients/python/docs/StagingLocation.md | 4 +- .../python/lakefs_client/api/import_api.py | 272 +--------------- .../lakefs_client/model/import_location.py | 4 +- .../lakefs_client/model/import_pagination.py | 276 ---------------- ...import_status_resp.py => import_status.py} | 6 +- .../model/ingest_range_creation_response.py | 268 ---------------- .../lakefs_client/model/object_stats.py | 4 +- .../model/stage_range_creation.py | 282 ---------------- .../lakefs_client/model/staging_location.py | 8 +- .../python/lakefs_client/models/__init__.py | 5 +- clients/python/test/test_import_api.py | 14 - clients/python/test/test_import_pagination.py | 36 --- ...t_status_resp.py => test_import_status.py} | 12 +- .../test_ingest_range_creation_response.py | 40 --- .../python/test/test_stage_range_creation.py | 36 --- docs/assets/js/swagger.yml | 146 +-------- esti/copy_test.go | 65 +--- esti/import_test.go | 272 ++-------------- pkg/api/controller.go | 90 +----- pkg/api/controller_test.go | 217 +------------ webui/src/lib/api/index.js | 34 +- 57 files changed, 174 insertions(+), 4028 deletions(-) delete mode 100644 clients/java/docs/ImportPagination.md rename clients/java/docs/{ImportStatusResp.md => ImportStatus.md} (95%) delete mode 100644 clients/java/docs/IngestRangeCreationResponse.md delete mode 100644 clients/java/docs/StageRangeCreation.md delete mode 100644 clients/java/src/main/java/io/lakefs/clients/api/model/ImportPagination.java rename clients/java/src/main/java/io/lakefs/clients/api/model/{ImportStatusResp.java => ImportStatus.java} (84%) delete mode 100644 
clients/java/src/main/java/io/lakefs/clients/api/model/IngestRangeCreationResponse.java delete mode 100644 clients/java/src/main/java/io/lakefs/clients/api/model/StageRangeCreation.java delete mode 100644 clients/java/src/test/java/io/lakefs/clients/api/model/ImportPaginationTest.java rename clients/java/src/test/java/io/lakefs/clients/api/model/{ImportStatusRespTest.java => ImportStatusTest.java} (86%) delete mode 100644 clients/java/src/test/java/io/lakefs/clients/api/model/IngestRangeCreationResponseTest.java delete mode 100644 clients/java/src/test/java/io/lakefs/clients/api/model/StageRangeCreationTest.java delete mode 100644 clients/python/docs/ImportPagination.md rename clients/python/docs/{ImportStatusResp.md => ImportStatus.md} (97%) delete mode 100644 clients/python/docs/IngestRangeCreationResponse.md delete mode 100644 clients/python/docs/StageRangeCreation.md delete mode 100644 clients/python/lakefs_client/model/import_pagination.py rename clients/python/lakefs_client/model/{import_status_resp.py => import_status.py} (98%) delete mode 100644 clients/python/lakefs_client/model/ingest_range_creation_response.py delete mode 100644 clients/python/lakefs_client/model/stage_range_creation.py delete mode 100644 clients/python/test/test_import_pagination.py rename clients/python/test/{test_import_status_resp.py => test_import_status.py} (68%) delete mode 100644 clients/python/test/test_ingest_range_creation_response.py delete mode 100644 clients/python/test/test_stage_range_creation.py diff --git a/api/swagger.yml b/api/swagger.yml index 4df6c7fd5f9..3b9ebba8be6 100644 --- a/api/swagger.yml +++ b/api/swagger.yml @@ -5,8 +5,8 @@ info: title: lakeFS API license: name: "Apache 2.0" - url: "https://www.apache.org/licenses/LICENSE-2.0.html" - version: "0.1.0" + url: https://www.apache.org/licenses/LICENSE-2.0.html + version: 0.1.0 servers: - url: "/api/v1" @@ -151,25 +151,6 @@ components: minimum: 0 description: Maximal number of entries per page - ImportPagination: - type: object - required: - - has_more - - last_key - properties: - has_more: - type: boolean - description: More keys to be ingested. - continuation_token: - type: string - description: Opaque. Token used to import the next range. - last_key: - type: string - description: Last object store key that was ingested. - staging_token: - type: string - description: Staging token for skipped objects during ingest - Repository: type: object required: @@ -290,9 +271,9 @@ components: type: integer format: int64 description: | - If present and nonzero, physical_address is a presigned URL and + If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than - the presigned URL lifetime if an authentication token is about + the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. @@ -1132,14 +1113,14 @@ components: presigned_url: type: string nullable: true - description: if presign=true is passed in the request, this field will contain a presigned URL to use when uploading + description: if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading presigned_url_expiry: type: integer format: int64 description: | - If present and nonzero, physical_address is a presigned URL and + If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. 
This will be shorter than - the presigned URL lifetime if an authentication token is about + the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. @@ -1267,7 +1248,7 @@ components: description: Path type, can either be 'common_prefix' or 'object' path: type: string - description: A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. + description: A source location to import path or to a single object. Must match the lakeFS installation blockstore type. example: s3://my-bucket/production/collections/ destination: type: string @@ -1295,32 +1276,6 @@ components: destination: collections/file1 type: object - StageRangeCreation: - type: object - required: - - fromSourceURI - - after - - prepend - properties: - fromSourceURI: - type: string - description: The source location of the ingested files. Must match the lakeFS installation blockstore type. - example: s3://my-bucket/production/collections/ - after: - type: string - description: Only objects after this key would be ingested. - example: production/collections/some/file.parquet - prepend: - type: string - description: A prefix to prepend to ingested objects. - example: collections/ - continuation_token: - type: string - description: Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key. - staging_token: - type: string - description: Opaque. Client should pass staging_token if received from server on previous request - RangeMetadata: type: object required: @@ -1349,15 +1304,7 @@ components: type: integer description: Estimated size of the range in bytes - IngestRangeCreationResponse: - type: object - properties: - range: - $ref: "#/components/schemas/RangeMetadata" - pagination: - $ref: "#/components/schemas/ImportPagination" - - ImportStatusResp: + ImportStatus: type: object properties: completed: @@ -1889,7 +1836,7 @@ paths: - $ref: "#/components/parameters/PaginationAmount" responses: 200: - description: group memeber list + description: group member list content: application/json: schema: @@ -3410,7 +3357,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/ImportStatusResp" + $ref: "#/components/schemas/ImportStatus" 401: $ref: "#/components/responses/Unauthorized" 404: @@ -3469,77 +3416,6 @@ paths: default: $ref: "#/components/responses/ServerError" - /repositories/{repository}/branches/metaranges: - parameters: - - in: path - name: repository - required: true - schema: - type: string - post: - tags: - - import - operationId: createMetaRange - summary: create a lakeFS metarange file from the given ranges - requestBody: - required: true - content: - application/json: - schema: - $ref: "#/components/schemas/MetaRangeCreation" - responses: - 201: - description: metarange metadata - content: - application/json: - schema: - $ref: "#/components/schemas/MetaRangeCreationResponse" - 400: - $ref: "#/components/responses/ValidationError" - 401: - $ref: "#/components/responses/Unauthorized" - 403: - $ref: "#/components/responses/Forbidden" - 404: - $ref: "#/components/responses/NotFound" - default: - $ref: "#/components/responses/ServerError" - - /repositories/{repository}/branches/ranges: - parameters: - - in: path - name: repository - required: true - schema: - type: string - post: - tags: - - import - operationId: ingestRange - summary: create a lakeFS range file from the source uri - requestBody: - required: true - content: - application/json: - schema: - 
$ref: "#/components/schemas/StageRangeCreation" - responses: - 201: - description: range metadata - content: - application/json: - schema: - $ref: "#/components/schemas/IngestRangeCreationResponse" - - 400: - $ref: "#/components/responses/ValidationError" - 401: - $ref: "#/components/responses/Unauthorized" - 404: - $ref: "#/components/responses/NotFound" - default: - $ref: "#/components/responses/ServerError" - /repositories/{repository}/branches/{branch}/objects/stage_allowed: parameters: - in: path diff --git a/clients/java/README.md b/clients/java/README.md index ccc0080be23..441a6dcf064 100644 --- a/clients/java/README.md +++ b/clients/java/README.md @@ -186,11 +186,9 @@ Class | Method | HTTP request | Description *ExperimentalApi* | [**getOtfDiffs**](docs/ExperimentalApi.md#getOtfDiffs) | **GET** /otf/diffs | get the available Open Table Format diffs *ExperimentalApi* | [**otfDiff**](docs/ExperimentalApi.md#otfDiff) | **GET** /repositories/{repository}/otf/refs/{left_ref}/diff/{right_ref} | perform otf diff *HealthCheckApi* | [**healthCheck**](docs/HealthCheckApi.md#healthCheck) | **GET** /healthcheck | -*ImportApi* | [**createMetaRange**](docs/ImportApi.md#createMetaRange) | **POST** /repositories/{repository}/branches/metaranges | create a lakeFS metarange file from the given ranges *ImportApi* | [**importCancel**](docs/ImportApi.md#importCancel) | **DELETE** /repositories/{repository}/branches/{branch}/import | cancel ongoing import *ImportApi* | [**importStart**](docs/ImportApi.md#importStart) | **POST** /repositories/{repository}/branches/{branch}/import | import data from object store *ImportApi* | [**importStatus**](docs/ImportApi.md#importStatus) | **GET** /repositories/{repository}/branches/{branch}/import | get import status -*ImportApi* | [**ingestRange**](docs/ImportApi.md#ingestRange) | **POST** /repositories/{repository}/branches/ranges | create a lakeFS range file from the source uri *InternalApi* | [**createBranchProtectionRulePreflight**](docs/InternalApi.md#createBranchProtectionRulePreflight) | **GET** /repositories/{repository}/branch_protection/set_allowed | *InternalApi* | [**getAuthCapabilities**](docs/InternalApi.md#getAuthCapabilities) | **GET** /auth/capabilities | list authentication capabilities supported *InternalApi* | [**getSetupState**](docs/InternalApi.md#getSetupState) | **GET** /setup_lakefs | check if the lakeFS installation is already set up @@ -277,9 +275,7 @@ Class | Method | HTTP request | Description - [ImportCreation](docs/ImportCreation.md) - [ImportCreationResponse](docs/ImportCreationResponse.md) - [ImportLocation](docs/ImportLocation.md) - - [ImportPagination](docs/ImportPagination.md) - - [ImportStatusResp](docs/ImportStatusResp.md) - - [IngestRangeCreationResponse](docs/IngestRangeCreationResponse.md) + - [ImportStatus](docs/ImportStatus.md) - [InlineObject](docs/InlineObject.md) - [InlineObject1](docs/InlineObject1.md) - [LoginConfig](docs/LoginConfig.md) @@ -314,7 +310,6 @@ Class | Method | HTTP request | Description - [RevertCreation](docs/RevertCreation.md) - [Setup](docs/Setup.md) - [SetupState](docs/SetupState.md) - - [StageRangeCreation](docs/StageRangeCreation.md) - [StagingLocation](docs/StagingLocation.md) - [StagingMetadata](docs/StagingMetadata.md) - [Statement](docs/Statement.md) diff --git a/clients/java/api/openapi.yaml b/clients/java/api/openapi.yaml index 1f57eb1e062..1c53abe4a5d 100644 --- a/clients/java/api/openapi.yaml +++ b/clients/java/api/openapi.yaml @@ -794,7 +794,7 @@ paths: application/json: schema: $ref: 
'#/components/schemas/UserList' - description: group memeber list + description: group member list "401": content: application/json: @@ -3626,7 +3626,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ImportStatusResp' + $ref: '#/components/schemas/ImportStatus' description: import status "401": content: @@ -3709,118 +3709,6 @@ paths: - import x-contentType: application/json x-accepts: application/json - /repositories/{repository}/branches/metaranges: - post: - operationId: createMetaRange - parameters: - - explode: false - in: path - name: repository - required: true - schema: - type: string - style: simple - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/MetaRangeCreation' - required: true - responses: - "201": - content: - application/json: - schema: - $ref: '#/components/schemas/MetaRangeCreationResponse' - description: metarange metadata - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Validation Error - "401": - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Unauthorized - "403": - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Forbidden - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Resource Not Found - default: - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Internal Server Error - summary: create a lakeFS metarange file from the given ranges - tags: - - import - x-contentType: application/json - x-accepts: application/json - /repositories/{repository}/branches/ranges: - post: - operationId: ingestRange - parameters: - - explode: false - in: path - name: repository - required: true - schema: - type: string - style: simple - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/StageRangeCreation' - required: true - responses: - "201": - content: - application/json: - schema: - $ref: '#/components/schemas/IngestRangeCreationResponse' - description: range metadata - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Validation Error - "401": - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Unauthorized - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Resource Not Found - default: - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - description: Internal Server Error - summary: create a lakeFS range file from the source uri - tags: - - import - x-contentType: application/json - x-accepts: application/json /repositories/{repository}/branches/{branch}/objects/stage_allowed: get: operationId: uploadObjectPreflight @@ -5546,29 +5434,6 @@ components: - next_offset - results type: object - ImportPagination: - example: - continuation_token: continuation_token - staging_token: staging_token - has_more: true - last_key: last_key - properties: - has_more: - description: More keys to be ingested. - type: boolean - continuation_token: - description: Opaque. Token used to import the next range. - type: string - last_key: - description: Last object store key that was ingested. 
- type: string - staging_token: - description: Staging token for skipped objects during ingest - type: string - required: - - has_more - - last_key - type: object Repository: example: default_branch: default_branch @@ -5721,9 +5586,9 @@ components: type: string physical_address_expiry: description: | - If present and nonzero, physical_address is a presigned URL and + If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than - the presigned URL lifetime if an authentication token is about + the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. @@ -6963,14 +6828,14 @@ components: type: string presigned_url: description: if presign=true is passed in the request, this field will contain - a presigned URL to use when uploading + a pre-signed URL to use when uploading nullable: true type: string presigned_url_expiry: description: | - If present and nonzero, physical_address is a presigned URL and + If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than - the presigned URL lifetime if an authentication token is about + the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. @@ -7129,7 +6994,7 @@ components: - object type: string path: - description: A source location to ingested path or to a single object. Must + description: A source location to import path or to a single object. Must match the lakeFS installation blockstore type. example: s3://my-bucket/production/collections/ type: string @@ -7162,47 +7027,7 @@ components: - commit - paths type: object - StageRangeCreation: - example: - fromSourceURI: s3://my-bucket/production/collections/ - continuation_token: continuation_token - staging_token: staging_token - prepend: collections/ - after: production/collections/some/file.parquet - properties: - fromSourceURI: - description: The source location of the ingested files. Must match the lakeFS - installation blockstore type. - example: s3://my-bucket/production/collections/ - type: string - after: - description: Only objects after this key would be ingested. - example: production/collections/some/file.parquet - type: string - prepend: - description: A prefix to prepend to ingested objects. - example: collections/ - type: string - continuation_token: - description: Opaque. Client should pass the continuation_token received - from server to continue creation ranges from the same key. - type: string - staging_token: - description: Opaque. Client should pass staging_token if received from server - on previous request - type: string - required: - - after - - fromSourceURI - - prepend - type: object RangeMetadata: - example: - max_key: production/collections/some/file_8229.parquet - count: 0 - estimated_size: 6 - id: 480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c - min_key: production/collections/some/file_1.parquet properties: id: description: ID of the range. 
@@ -7229,26 +7054,7 @@ components: - max_key - min_key type: object - IngestRangeCreationResponse: - example: - pagination: - continuation_token: continuation_token - staging_token: staging_token - has_more: true - last_key: last_key - range: - max_key: production/collections/some/file_8229.parquet - count: 0 - estimated_size: 6 - id: 480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c - min_key: production/collections/some/file_1.parquet - properties: - range: - $ref: '#/components/schemas/RangeMetadata' - pagination: - $ref: '#/components/schemas/ImportPagination' - type: object - ImportStatusResp: + ImportStatus: example: update_time: 2000-01-23T04:56:07.000+00:00 metarange_id: metarange_id @@ -7298,18 +7104,6 @@ components: - id type: object MetaRangeCreation: - example: - ranges: - - max_key: production/collections/some/file_8229.parquet - count: 0 - estimated_size: 6 - id: 480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c - min_key: production/collections/some/file_1.parquet - - max_key: production/collections/some/file_8229.parquet - count: 0 - estimated_size: 6 - id: 480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c - min_key: production/collections/some/file_1.parquet properties: ranges: items: @@ -7320,8 +7114,6 @@ components: - ranges type: object MetaRangeCreationResponse: - example: - id: id properties: id: description: The id of the created metarange diff --git a/clients/java/docs/AuthApi.md b/clients/java/docs/AuthApi.md index f8afed299a9..955bdd8a725 100644 --- a/clients/java/docs/AuthApi.md +++ b/clients/java/docs/AuthApi.md @@ -1950,7 +1950,7 @@ Name | Type | Description | Notes ### HTTP response details | Status code | Description | Response headers | |-------------|-------------|------------------| -**200** | group memeber list | - | +**200** | group member list | - | **401** | Unauthorized | - | **0** | Internal Server Error | - | diff --git a/clients/java/docs/ImportApi.md b/clients/java/docs/ImportApi.md index 1153902fe38..1746e39662f 100644 --- a/clients/java/docs/ImportApi.md +++ b/clients/java/docs/ImportApi.md @@ -4,108 +4,11 @@ All URIs are relative to *http://localhost/api/v1* Method | HTTP request | Description ------------- | ------------- | ------------- -[**createMetaRange**](ImportApi.md#createMetaRange) | **POST** /repositories/{repository}/branches/metaranges | create a lakeFS metarange file from the given ranges [**importCancel**](ImportApi.md#importCancel) | **DELETE** /repositories/{repository}/branches/{branch}/import | cancel ongoing import [**importStart**](ImportApi.md#importStart) | **POST** /repositories/{repository}/branches/{branch}/import | import data from object store [**importStatus**](ImportApi.md#importStatus) | **GET** /repositories/{repository}/branches/{branch}/import | get import status -[**ingestRange**](ImportApi.md#ingestRange) | **POST** /repositories/{repository}/branches/ranges | create a lakeFS range file from the source uri - -# **createMetaRange** -> MetaRangeCreationResponse createMetaRange(repository, metaRangeCreation) - -create a lakeFS metarange file from the given ranges - -### Example -```java -// Import classes: -import io.lakefs.clients.api.ApiClient; -import io.lakefs.clients.api.ApiException; -import io.lakefs.clients.api.Configuration; -import io.lakefs.clients.api.auth.*; -import io.lakefs.clients.api.models.*; -import io.lakefs.clients.api.ImportApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = 
Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost/api/v1"); - - // Configure HTTP basic authorization: basic_auth - HttpBasicAuth basic_auth = (HttpBasicAuth) defaultClient.getAuthentication("basic_auth"); - basic_auth.setUsername("YOUR USERNAME"); - basic_auth.setPassword("YOUR PASSWORD"); - - // Configure API key authorization: cookie_auth - ApiKeyAuth cookie_auth = (ApiKeyAuth) defaultClient.getAuthentication("cookie_auth"); - cookie_auth.setApiKey("YOUR API KEY"); - // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) - //cookie_auth.setApiKeyPrefix("Token"); - - // Configure HTTP bearer authorization: jwt_token - HttpBearerAuth jwt_token = (HttpBearerAuth) defaultClient.getAuthentication("jwt_token"); - jwt_token.setBearerToken("BEARER TOKEN"); - - // Configure API key authorization: oidc_auth - ApiKeyAuth oidc_auth = (ApiKeyAuth) defaultClient.getAuthentication("oidc_auth"); - oidc_auth.setApiKey("YOUR API KEY"); - // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) - //oidc_auth.setApiKeyPrefix("Token"); - - // Configure API key authorization: saml_auth - ApiKeyAuth saml_auth = (ApiKeyAuth) defaultClient.getAuthentication("saml_auth"); - saml_auth.setApiKey("YOUR API KEY"); - // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) - //saml_auth.setApiKeyPrefix("Token"); - - ImportApi apiInstance = new ImportApi(defaultClient); - String repository = "repository_example"; // String | - MetaRangeCreation metaRangeCreation = new MetaRangeCreation(); // MetaRangeCreation | - try { - MetaRangeCreationResponse result = apiInstance.createMetaRange(repository, metaRangeCreation); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling ImportApi#createMetaRange"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **repository** | **String**| | - **metaRangeCreation** | [**MetaRangeCreation**](MetaRangeCreation.md)| | - -### Return type - -[**MetaRangeCreationResponse**](MetaRangeCreationResponse.md) - -### Authorization - -[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth) - -### HTTP request headers - - - **Content-Type**: application/json - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**201** | metarange metadata | - | -**400** | Validation Error | - | -**401** | Unauthorized | - | -**403** | Forbidden | - | -**404** | Resource Not Found | - | -**0** | Internal Server Error | - | - # **importCancel** > importCancel(repository, branch, id) @@ -300,7 +203,7 @@ Name | Type | Description | Notes # **importStatus** -> ImportStatusResp importStatus(repository, branch, id) +> ImportStatus importStatus(repository, branch, id) get import status @@ -351,7 +254,7 @@ public class Example { String branch = "branch_example"; // String | String id = "id_example"; // String | Unique identifier of the import process try { - ImportStatusResp result = 
apiInstance.importStatus(repository, branch, id); + ImportStatus result = apiInstance.importStatus(repository, branch, id); System.out.println(result); } catch (ApiException e) { System.err.println("Exception when calling ImportApi#importStatus"); @@ -374,7 +277,7 @@ Name | Type | Description | Notes ### Return type -[**ImportStatusResp**](ImportStatusResp.md) +[**ImportStatus**](ImportStatus.md) ### Authorization @@ -393,97 +296,3 @@ Name | Type | Description | Notes **404** | Resource Not Found | - | **0** | Internal Server Error | - | - -# **ingestRange** -> IngestRangeCreationResponse ingestRange(repository, stageRangeCreation) - -create a lakeFS range file from the source uri - -### Example -```java -// Import classes: -import io.lakefs.clients.api.ApiClient; -import io.lakefs.clients.api.ApiException; -import io.lakefs.clients.api.Configuration; -import io.lakefs.clients.api.auth.*; -import io.lakefs.clients.api.models.*; -import io.lakefs.clients.api.ImportApi; - -public class Example { - public static void main(String[] args) { - ApiClient defaultClient = Configuration.getDefaultApiClient(); - defaultClient.setBasePath("http://localhost/api/v1"); - - // Configure HTTP basic authorization: basic_auth - HttpBasicAuth basic_auth = (HttpBasicAuth) defaultClient.getAuthentication("basic_auth"); - basic_auth.setUsername("YOUR USERNAME"); - basic_auth.setPassword("YOUR PASSWORD"); - - // Configure API key authorization: cookie_auth - ApiKeyAuth cookie_auth = (ApiKeyAuth) defaultClient.getAuthentication("cookie_auth"); - cookie_auth.setApiKey("YOUR API KEY"); - // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) - //cookie_auth.setApiKeyPrefix("Token"); - - // Configure HTTP bearer authorization: jwt_token - HttpBearerAuth jwt_token = (HttpBearerAuth) defaultClient.getAuthentication("jwt_token"); - jwt_token.setBearerToken("BEARER TOKEN"); - - // Configure API key authorization: oidc_auth - ApiKeyAuth oidc_auth = (ApiKeyAuth) defaultClient.getAuthentication("oidc_auth"); - oidc_auth.setApiKey("YOUR API KEY"); - // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null) - //oidc_auth.setApiKeyPrefix("Token"); - - // Configure API key authorization: saml_auth - ApiKeyAuth saml_auth = (ApiKeyAuth) defaultClient.getAuthentication("saml_auth"); - saml_auth.setApiKey("YOUR API KEY"); - // Uncomment the following line to set a prefix for the API key, e.g. 
"Token" (defaults to null) - //saml_auth.setApiKeyPrefix("Token"); - - ImportApi apiInstance = new ImportApi(defaultClient); - String repository = "repository_example"; // String | - StageRangeCreation stageRangeCreation = new StageRangeCreation(); // StageRangeCreation | - try { - IngestRangeCreationResponse result = apiInstance.ingestRange(repository, stageRangeCreation); - System.out.println(result); - } catch (ApiException e) { - System.err.println("Exception when calling ImportApi#ingestRange"); - System.err.println("Status code: " + e.getCode()); - System.err.println("Reason: " + e.getResponseBody()); - System.err.println("Response headers: " + e.getResponseHeaders()); - e.printStackTrace(); - } - } -} -``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **repository** | **String**| | - **stageRangeCreation** | [**StageRangeCreation**](StageRangeCreation.md)| | - -### Return type - -[**IngestRangeCreationResponse**](IngestRangeCreationResponse.md) - -### Authorization - -[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth) - -### HTTP request headers - - - **Content-Type**: application/json - - **Accept**: application/json - -### HTTP response details -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**201** | range metadata | - | -**400** | Validation Error | - | -**401** | Unauthorized | - | -**404** | Resource Not Found | - | -**0** | Internal Server Error | - | - diff --git a/clients/java/docs/ImportLocation.md b/clients/java/docs/ImportLocation.md index fdabf7bae94..bbea056cd4e 100644 --- a/clients/java/docs/ImportLocation.md +++ b/clients/java/docs/ImportLocation.md @@ -8,7 +8,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **type** | [**TypeEnum**](#TypeEnum) | Path type, can either be 'common_prefix' or 'object' | -**path** | **String** | A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. | +**path** | **String** | A source location to import path or to a single object. Must match the lakeFS installation blockstore type. | **destination** | **String** | Destination for the imported objects on the branch | diff --git a/clients/java/docs/ImportPagination.md b/clients/java/docs/ImportPagination.md deleted file mode 100644 index 42c4f46255f..00000000000 --- a/clients/java/docs/ImportPagination.md +++ /dev/null @@ -1,16 +0,0 @@ - - -# ImportPagination - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**hasMore** | **Boolean** | More keys to be ingested. | -**continuationToken** | **String** | Opaque. Token used to import the next range. | [optional] -**lastKey** | **String** | Last object store key that was ingested. 
| -**stagingToken** | **String** | Staging token for skipped objects during ingest | [optional] - - - diff --git a/clients/java/docs/ImportStatusResp.md b/clients/java/docs/ImportStatus.md similarity index 95% rename from clients/java/docs/ImportStatusResp.md rename to clients/java/docs/ImportStatus.md index 28c4c44ad65..e184f352672 100644 --- a/clients/java/docs/ImportStatusResp.md +++ b/clients/java/docs/ImportStatus.md @@ -1,6 +1,6 @@ -# ImportStatusResp +# ImportStatus ## Properties diff --git a/clients/java/docs/IngestRangeCreationResponse.md b/clients/java/docs/IngestRangeCreationResponse.md deleted file mode 100644 index b55853a2f01..00000000000 --- a/clients/java/docs/IngestRangeCreationResponse.md +++ /dev/null @@ -1,14 +0,0 @@ - - -# IngestRangeCreationResponse - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**range** | [**RangeMetadata**](RangeMetadata.md) | | [optional] -**pagination** | [**ImportPagination**](ImportPagination.md) | | [optional] - - - diff --git a/clients/java/docs/ObjectStats.md b/clients/java/docs/ObjectStats.md index 9b2ccd70ebd..ed818768da7 100644 --- a/clients/java/docs/ObjectStats.md +++ b/clients/java/docs/ObjectStats.md @@ -10,7 +10,7 @@ Name | Type | Description | Notes **path** | **String** | | **pathType** | [**PathTypeEnum**](#PathTypeEnum) | | **physicalAddress** | **String** | The location of the object on the underlying object store. Formatted as a native URI with the object store type as scheme (\"s3://...\", \"gs://...\", etc.) Or, in the case of presign=true, will be an HTTP URL to be consumed via regular HTTP GET | -**physicalAddressExpiry** | **Long** | If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional] +**physicalAddressExpiry** | **Long** | If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional] **checksum** | **String** | | **sizeBytes** | **Long** | | [optional] **mtime** | **Long** | Unix Epoch in seconds | diff --git a/clients/java/docs/StageRangeCreation.md b/clients/java/docs/StageRangeCreation.md deleted file mode 100644 index 9ffe0932b0a..00000000000 --- a/clients/java/docs/StageRangeCreation.md +++ /dev/null @@ -1,17 +0,0 @@ - - -# StageRangeCreation - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**fromSourceURI** | **String** | The source location of the ingested files. Must match the lakeFS installation blockstore type. | -**after** | **String** | Only objects after this key would be ingested. | -**prepend** | **String** | A prefix to prepend to ingested objects. | -**continuationToken** | **String** | Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key. | [optional] -**stagingToken** | **String** | Opaque. 
Client should pass staging_token if received from server on previous request | [optional] - - - diff --git a/clients/java/docs/StagingLocation.md b/clients/java/docs/StagingLocation.md index b28a1d95e08..aa20403bec4 100644 --- a/clients/java/docs/StagingLocation.md +++ b/clients/java/docs/StagingLocation.md @@ -10,8 +10,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **physicalAddress** | **String** | | [optional] **token** | **String** | opaque staging token to use to link uploaded object | -**presignedUrl** | **String** | if presign=true is passed in the request, this field will contain a presigned URL to use when uploading | [optional] -**presignedUrlExpiry** | **Long** | If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional] +**presignedUrl** | **String** | if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading | [optional] +**presignedUrlExpiry** | **Long** | If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional] diff --git a/clients/java/src/main/java/io/lakefs/clients/api/AuthApi.java b/clients/java/src/main/java/io/lakefs/clients/api/AuthApi.java index 6bf7a89ef65..f9013f05c59 100644 --- a/clients/java/src/main/java/io/lakefs/clients/api/AuthApi.java +++ b/clients/java/src/main/java/io/lakefs/clients/api/AuthApi.java @@ -2575,7 +2575,7 @@ public okhttp3.Call getUserAsync(String userId, final ApiCallback _callbac * @http.response.details - +
        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-       <tr><td> 200 </td><td> group memeber list </td><td>  -  </td></tr>
+       <tr><td> 200 </td><td> group member list </td><td>  -  </td></tr>
        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
@@ -2649,7 +2649,7 @@ private okhttp3.Call listGroupMembersValidateBeforeCall(String groupId, String p * @http.response.details - +
        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-       <tr><td> 200 </td><td> group memeber list </td><td>  -  </td></tr>
+       <tr><td> 200 </td><td> group member list </td><td>  -  </td></tr>
        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
@@ -2671,7 +2671,7 @@ public UserList listGroupMembers(String groupId, String prefix, String after, In * @http.response.details - +
        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-       <tr><td> 200 </td><td> group memeber list </td><td>  -  </td></tr>
+       <tr><td> 200 </td><td> group member list </td><td>  -  </td></tr>
        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
@@ -2695,7 +2695,7 @@ public ApiResponse listGroupMembersWithHttpInfo(String groupId, String * @http.response.details - +
        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-       <tr><td> 200 </td><td> group memeber list </td><td>  -  </td></tr>
+       <tr><td> 200 </td><td> group member list </td><td>  -  </td></tr>
        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/ImportApi.java b/clients/java/src/main/java/io/lakefs/clients/api/ImportApi.java index 88e870c48d4..b5979916631 100644 --- a/clients/java/src/main/java/io/lakefs/clients/api/ImportApi.java +++ b/clients/java/src/main/java/io/lakefs/clients/api/ImportApi.java @@ -30,11 +30,7 @@ import io.lakefs.clients.api.model.Error; import io.lakefs.clients.api.model.ImportCreation; import io.lakefs.clients.api.model.ImportCreationResponse; -import io.lakefs.clients.api.model.ImportStatusResp; -import io.lakefs.clients.api.model.IngestRangeCreationResponse; -import io.lakefs.clients.api.model.MetaRangeCreation; -import io.lakefs.clients.api.model.MetaRangeCreationResponse; -import io.lakefs.clients.api.model.StageRangeCreation; +import io.lakefs.clients.api.model.ImportStatus; import java.lang.reflect.Type; import java.util.ArrayList; @@ -61,147 +57,6 @@ public void setApiClient(ApiClient apiClient) { this.localVarApiClient = apiClient; } - /** - * Build call for createMetaRange - * @param repository (required) - * @param metaRangeCreation (required) - * @param _callback Callback for upload/download progress - * @return Call to execute - * @throws ApiException If fail to serialize the request body object - * @http.response.details - - - - - - - - -
-        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-        <tr><td> 201 </td><td> metarange metadata </td><td>  -  </td></tr>
-        <tr><td> 400 </td><td> Validation Error </td><td>  -  </td></tr>
-        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
-        <tr><td> 403 </td><td> Forbidden </td><td>  -  </td></tr>
-        <tr><td> 404 </td><td> Resource Not Found </td><td>  -  </td></tr>
-        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
- */ - public okhttp3.Call createMetaRangeCall(String repository, MetaRangeCreation metaRangeCreation, final ApiCallback _callback) throws ApiException { - Object localVarPostBody = metaRangeCreation; - - // create path and map variables - String localVarPath = "/repositories/{repository}/branches/metaranges" - .replaceAll("\\{" + "repository" + "\\}", localVarApiClient.escapeString(repository.toString())); - - List localVarQueryParams = new ArrayList(); - List localVarCollectionQueryParams = new ArrayList(); - Map localVarHeaderParams = new HashMap(); - Map localVarCookieParams = new HashMap(); - Map localVarFormParams = new HashMap(); - - final String[] localVarAccepts = { - "application/json" - }; - final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts); - if (localVarAccept != null) { - localVarHeaderParams.put("Accept", localVarAccept); - } - - final String[] localVarContentTypes = { - "application/json" - }; - final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes); - localVarHeaderParams.put("Content-Type", localVarContentType); - - String[] localVarAuthNames = new String[] { "basic_auth", "cookie_auth", "jwt_token", "oidc_auth", "saml_auth" }; - return localVarApiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback); - } - - @SuppressWarnings("rawtypes") - private okhttp3.Call createMetaRangeValidateBeforeCall(String repository, MetaRangeCreation metaRangeCreation, final ApiCallback _callback) throws ApiException { - - // verify the required parameter 'repository' is set - if (repository == null) { - throw new ApiException("Missing the required parameter 'repository' when calling createMetaRange(Async)"); - } - - // verify the required parameter 'metaRangeCreation' is set - if (metaRangeCreation == null) { - throw new ApiException("Missing the required parameter 'metaRangeCreation' when calling createMetaRange(Async)"); - } - - - okhttp3.Call localVarCall = createMetaRangeCall(repository, metaRangeCreation, _callback); - return localVarCall; - - } - - /** - * create a lakeFS metarange file from the given ranges - * - * @param repository (required) - * @param metaRangeCreation (required) - * @return MetaRangeCreationResponse - * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body - * @http.response.details - - - - - - - - -
-        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-        <tr><td> 201 </td><td> metarange metadata </td><td>  -  </td></tr>
-        <tr><td> 400 </td><td> Validation Error </td><td>  -  </td></tr>
-        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
-        <tr><td> 403 </td><td> Forbidden </td><td>  -  </td></tr>
-        <tr><td> 404 </td><td> Resource Not Found </td><td>  -  </td></tr>
-        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
- */ - public MetaRangeCreationResponse createMetaRange(String repository, MetaRangeCreation metaRangeCreation) throws ApiException { - ApiResponse localVarResp = createMetaRangeWithHttpInfo(repository, metaRangeCreation); - return localVarResp.getData(); - } - - /** - * create a lakeFS metarange file from the given ranges - * - * @param repository (required) - * @param metaRangeCreation (required) - * @return ApiResponse<MetaRangeCreationResponse> - * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body - * @http.response.details - - - - - - - - -
-        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-        <tr><td> 201 </td><td> metarange metadata </td><td>  -  </td></tr>
-        <tr><td> 400 </td><td> Validation Error </td><td>  -  </td></tr>
-        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
-        <tr><td> 403 </td><td> Forbidden </td><td>  -  </td></tr>
-        <tr><td> 404 </td><td> Resource Not Found </td><td>  -  </td></tr>
-        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
- */ - public ApiResponse createMetaRangeWithHttpInfo(String repository, MetaRangeCreation metaRangeCreation) throws ApiException { - okhttp3.Call localVarCall = createMetaRangeValidateBeforeCall(repository, metaRangeCreation, null); - Type localVarReturnType = new TypeToken(){}.getType(); - return localVarApiClient.execute(localVarCall, localVarReturnType); - } - - /** - * create a lakeFS metarange file from the given ranges (asynchronously) - * - * @param repository (required) - * @param metaRangeCreation (required) - * @param _callback The callback to be executed when the API call finishes - * @return The request call - * @throws ApiException If fail to process the API call, e.g. serializing the request body object - * @http.response.details - - - - - - - - -
-        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-        <tr><td> 201 </td><td> metarange metadata </td><td>  -  </td></tr>
-        <tr><td> 400 </td><td> Validation Error </td><td>  -  </td></tr>
-        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
-        <tr><td> 403 </td><td> Forbidden </td><td>  -  </td></tr>
-        <tr><td> 404 </td><td> Resource Not Found </td><td>  -  </td></tr>
-        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
- */ - public okhttp3.Call createMetaRangeAsync(String repository, MetaRangeCreation metaRangeCreation, final ApiCallback _callback) throws ApiException { - - okhttp3.Call localVarCall = createMetaRangeValidateBeforeCall(repository, metaRangeCreation, _callback); - Type localVarReturnType = new TypeToken(){}.getType(); - localVarApiClient.executeAsync(localVarCall, localVarReturnType, _callback); - return localVarCall; - } /** * Build call for importCancel * @param repository (required) @@ -583,7 +438,7 @@ private okhttp3.Call importStatusValidateBeforeCall(String repository, String br * @param repository (required) * @param branch (required) * @param id Unique identifier of the import process (required) - * @return ImportStatusResp + * @return ImportStatus * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body * @http.response.details @@ -594,8 +449,8 @@ private okhttp3.Call importStatusValidateBeforeCall(String repository, String br
        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
*/ - public ImportStatusResp importStatus(String repository, String branch, String id) throws ApiException { - ApiResponse localVarResp = importStatusWithHttpInfo(repository, branch, id); + public ImportStatus importStatus(String repository, String branch, String id) throws ApiException { + ApiResponse localVarResp = importStatusWithHttpInfo(repository, branch, id); return localVarResp.getData(); } @@ -605,7 +460,7 @@ public ImportStatusResp importStatus(String repository, String branch, String id * @param repository (required) * @param branch (required) * @param id Unique identifier of the import process (required) - * @return ApiResponse<ImportStatusResp> + * @return ApiResponse<ImportStatus> * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body * @http.response.details @@ -616,9 +471,9 @@ public ImportStatusResp importStatus(String repository, String branch, String id
        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
*/ - public ApiResponse importStatusWithHttpInfo(String repository, String branch, String id) throws ApiException { + public ApiResponse importStatusWithHttpInfo(String repository, String branch, String id) throws ApiException { okhttp3.Call localVarCall = importStatusValidateBeforeCall(repository, branch, id, null); - Type localVarReturnType = new TypeToken(){}.getType(); + Type localVarReturnType = new TypeToken(){}.getType(); return localVarApiClient.execute(localVarCall, localVarReturnType); } @@ -640,147 +495,10 @@ public ApiResponse importStatusWithHttpInfo(String repository, 0 Internal Server Error - */ - public okhttp3.Call importStatusAsync(String repository, String branch, String id, final ApiCallback _callback) throws ApiException { + public okhttp3.Call importStatusAsync(String repository, String branch, String id, final ApiCallback _callback) throws ApiException { okhttp3.Call localVarCall = importStatusValidateBeforeCall(repository, branch, id, _callback); - Type localVarReturnType = new TypeToken(){}.getType(); - localVarApiClient.executeAsync(localVarCall, localVarReturnType, _callback); - return localVarCall; - } - /** - * Build call for ingestRange - * @param repository (required) - * @param stageRangeCreation (required) - * @param _callback Callback for upload/download progress - * @return Call to execute - * @throws ApiException If fail to serialize the request body object - * @http.response.details - - - - - - - -
-        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-        <tr><td> 201 </td><td> range metadata </td><td>  -  </td></tr>
-        <tr><td> 400 </td><td> Validation Error </td><td>  -  </td></tr>
-        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
-        <tr><td> 404 </td><td> Resource Not Found </td><td>  -  </td></tr>
-        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
- */ - public okhttp3.Call ingestRangeCall(String repository, StageRangeCreation stageRangeCreation, final ApiCallback _callback) throws ApiException { - Object localVarPostBody = stageRangeCreation; - - // create path and map variables - String localVarPath = "/repositories/{repository}/branches/ranges" - .replaceAll("\\{" + "repository" + "\\}", localVarApiClient.escapeString(repository.toString())); - - List localVarQueryParams = new ArrayList(); - List localVarCollectionQueryParams = new ArrayList(); - Map localVarHeaderParams = new HashMap(); - Map localVarCookieParams = new HashMap(); - Map localVarFormParams = new HashMap(); - - final String[] localVarAccepts = { - "application/json" - }; - final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts); - if (localVarAccept != null) { - localVarHeaderParams.put("Accept", localVarAccept); - } - - final String[] localVarContentTypes = { - "application/json" - }; - final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes); - localVarHeaderParams.put("Content-Type", localVarContentType); - - String[] localVarAuthNames = new String[] { "basic_auth", "cookie_auth", "jwt_token", "oidc_auth", "saml_auth" }; - return localVarApiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback); - } - - @SuppressWarnings("rawtypes") - private okhttp3.Call ingestRangeValidateBeforeCall(String repository, StageRangeCreation stageRangeCreation, final ApiCallback _callback) throws ApiException { - - // verify the required parameter 'repository' is set - if (repository == null) { - throw new ApiException("Missing the required parameter 'repository' when calling ingestRange(Async)"); - } - - // verify the required parameter 'stageRangeCreation' is set - if (stageRangeCreation == null) { - throw new ApiException("Missing the required parameter 'stageRangeCreation' when calling ingestRange(Async)"); - } - - - okhttp3.Call localVarCall = ingestRangeCall(repository, stageRangeCreation, _callback); - return localVarCall; - - } - - /** - * create a lakeFS range file from the source uri - * - * @param repository (required) - * @param stageRangeCreation (required) - * @return IngestRangeCreationResponse - * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body - * @http.response.details - - - - - - - -
-        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-        <tr><td> 201 </td><td> range metadata </td><td>  -  </td></tr>
-        <tr><td> 400 </td><td> Validation Error </td><td>  -  </td></tr>
-        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
-        <tr><td> 404 </td><td> Resource Not Found </td><td>  -  </td></tr>
-        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
- */ - public IngestRangeCreationResponse ingestRange(String repository, StageRangeCreation stageRangeCreation) throws ApiException { - ApiResponse localVarResp = ingestRangeWithHttpInfo(repository, stageRangeCreation); - return localVarResp.getData(); - } - - /** - * create a lakeFS range file from the source uri - * - * @param repository (required) - * @param stageRangeCreation (required) - * @return ApiResponse<IngestRangeCreationResponse> - * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body - * @http.response.details - - - - - - - -
-        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-        <tr><td> 201 </td><td> range metadata </td><td>  -  </td></tr>
-        <tr><td> 400 </td><td> Validation Error </td><td>  -  </td></tr>
-        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
-        <tr><td> 404 </td><td> Resource Not Found </td><td>  -  </td></tr>
-        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
- */ - public ApiResponse ingestRangeWithHttpInfo(String repository, StageRangeCreation stageRangeCreation) throws ApiException { - okhttp3.Call localVarCall = ingestRangeValidateBeforeCall(repository, stageRangeCreation, null); - Type localVarReturnType = new TypeToken(){}.getType(); - return localVarApiClient.execute(localVarCall, localVarReturnType); - } - - /** - * create a lakeFS range file from the source uri (asynchronously) - * - * @param repository (required) - * @param stageRangeCreation (required) - * @param _callback The callback to be executed when the API call finishes - * @return The request call - * @throws ApiException If fail to process the API call, e.g. serializing the request body object - * @http.response.details - - - - - - - -
-        <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
-        <tr><td> 201 </td><td> range metadata </td><td>  -  </td></tr>
-        <tr><td> 400 </td><td> Validation Error </td><td>  -  </td></tr>
-        <tr><td> 401 </td><td> Unauthorized </td><td>  -  </td></tr>
-        <tr><td> 404 </td><td> Resource Not Found </td><td>  -  </td></tr>
-        <tr><td> 0 </td><td> Internal Server Error </td><td>  -  </td></tr>
- */ - public okhttp3.Call ingestRangeAsync(String repository, StageRangeCreation stageRangeCreation, final ApiCallback _callback) throws ApiException { - - okhttp3.Call localVarCall = ingestRangeValidateBeforeCall(repository, stageRangeCreation, _callback); - Type localVarReturnType = new TypeToken(){}.getType(); + Type localVarReturnType = new TypeToken(){}.getType(); localVarApiClient.executeAsync(localVarCall, localVarReturnType, _callback); return localVarCall; } diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportLocation.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportLocation.java index 6447ee9b3e0..9de81d0bbfb 100644 --- a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportLocation.java +++ b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportLocation.java @@ -119,11 +119,11 @@ public ImportLocation path(String path) { } /** - * A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. + * A source location to import path or to a single object. Must match the lakeFS installation blockstore type. * @return path **/ @javax.annotation.Nonnull - @ApiModelProperty(example = "s3://my-bucket/production/collections/", required = true, value = "A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type.") + @ApiModelProperty(example = "s3://my-bucket/production/collections/", required = true, value = "A source location to import path or to a single object. Must match the lakeFS installation blockstore type.") public String getPath() { return path; diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportPagination.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportPagination.java deleted file mode 100644 index 22c650d81ca..00000000000 --- a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportPagination.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * lakeFS API - * lakeFS HTTP API - * - * The version of the OpenAPI document: 0.1.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. 
- */ - - -package io.lakefs.clients.api.model; - -import java.util.Objects; -import java.util.Arrays; -import com.google.gson.TypeAdapter; -import com.google.gson.annotations.JsonAdapter; -import com.google.gson.annotations.SerializedName; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import java.io.IOException; - -/** - * ImportPagination - */ -@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen") -public class ImportPagination { - public static final String SERIALIZED_NAME_HAS_MORE = "has_more"; - @SerializedName(SERIALIZED_NAME_HAS_MORE) - private Boolean hasMore; - - public static final String SERIALIZED_NAME_CONTINUATION_TOKEN = "continuation_token"; - @SerializedName(SERIALIZED_NAME_CONTINUATION_TOKEN) - private String continuationToken; - - public static final String SERIALIZED_NAME_LAST_KEY = "last_key"; - @SerializedName(SERIALIZED_NAME_LAST_KEY) - private String lastKey; - - public static final String SERIALIZED_NAME_STAGING_TOKEN = "staging_token"; - @SerializedName(SERIALIZED_NAME_STAGING_TOKEN) - private String stagingToken; - - - public ImportPagination hasMore(Boolean hasMore) { - - this.hasMore = hasMore; - return this; - } - - /** - * More keys to be ingested. - * @return hasMore - **/ - @javax.annotation.Nonnull - @ApiModelProperty(required = true, value = "More keys to be ingested.") - - public Boolean getHasMore() { - return hasMore; - } - - - public void setHasMore(Boolean hasMore) { - this.hasMore = hasMore; - } - - - public ImportPagination continuationToken(String continuationToken) { - - this.continuationToken = continuationToken; - return this; - } - - /** - * Opaque. Token used to import the next range. - * @return continuationToken - **/ - @javax.annotation.Nullable - @ApiModelProperty(value = "Opaque. Token used to import the next range.") - - public String getContinuationToken() { - return continuationToken; - } - - - public void setContinuationToken(String continuationToken) { - this.continuationToken = continuationToken; - } - - - public ImportPagination lastKey(String lastKey) { - - this.lastKey = lastKey; - return this; - } - - /** - * Last object store key that was ingested. 
- * @return lastKey - **/ - @javax.annotation.Nonnull - @ApiModelProperty(required = true, value = "Last object store key that was ingested.") - - public String getLastKey() { - return lastKey; - } - - - public void setLastKey(String lastKey) { - this.lastKey = lastKey; - } - - - public ImportPagination stagingToken(String stagingToken) { - - this.stagingToken = stagingToken; - return this; - } - - /** - * Staging token for skipped objects during ingest - * @return stagingToken - **/ - @javax.annotation.Nullable - @ApiModelProperty(value = "Staging token for skipped objects during ingest") - - public String getStagingToken() { - return stagingToken; - } - - - public void setStagingToken(String stagingToken) { - this.stagingToken = stagingToken; - } - - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - ImportPagination importPagination = (ImportPagination) o; - return Objects.equals(this.hasMore, importPagination.hasMore) && - Objects.equals(this.continuationToken, importPagination.continuationToken) && - Objects.equals(this.lastKey, importPagination.lastKey) && - Objects.equals(this.stagingToken, importPagination.stagingToken); - } - - @Override - public int hashCode() { - return Objects.hash(hasMore, continuationToken, lastKey, stagingToken); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("class ImportPagination {\n"); - sb.append(" hasMore: ").append(toIndentedString(hasMore)).append("\n"); - sb.append(" continuationToken: ").append(toIndentedString(continuationToken)).append("\n"); - sb.append(" lastKey: ").append(toIndentedString(lastKey)).append("\n"); - sb.append(" stagingToken: ").append(toIndentedString(stagingToken)).append("\n"); - sb.append("}"); - return sb.toString(); - } - - /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). 
- */ - private String toIndentedString(Object o) { - if (o == null) { - return "null"; - } - return o.toString().replace("\n", "\n "); - } - -} - diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatusResp.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatus.java similarity index 84% rename from clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatusResp.java rename to clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatus.java index de3c94f1a30..dd10756f815 100644 --- a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatusResp.java +++ b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatus.java @@ -28,10 +28,10 @@ import org.threeten.bp.OffsetDateTime; /** - * ImportStatusResp + * ImportStatus */ @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen") -public class ImportStatusResp { +public class ImportStatus { public static final String SERIALIZED_NAME_COMPLETED = "completed"; @SerializedName(SERIALIZED_NAME_COMPLETED) private Boolean completed; @@ -57,7 +57,7 @@ public class ImportStatusResp { private Error error; - public ImportStatusResp completed(Boolean completed) { + public ImportStatus completed(Boolean completed) { this.completed = completed; return this; @@ -80,7 +80,7 @@ public void setCompleted(Boolean completed) { } - public ImportStatusResp updateTime(OffsetDateTime updateTime) { + public ImportStatus updateTime(OffsetDateTime updateTime) { this.updateTime = updateTime; return this; @@ -103,7 +103,7 @@ public void setUpdateTime(OffsetDateTime updateTime) { } - public ImportStatusResp ingestedObjects(Long ingestedObjects) { + public ImportStatus ingestedObjects(Long ingestedObjects) { this.ingestedObjects = ingestedObjects; return this; @@ -126,7 +126,7 @@ public void setIngestedObjects(Long ingestedObjects) { } - public ImportStatusResp metarangeId(String metarangeId) { + public ImportStatus metarangeId(String metarangeId) { this.metarangeId = metarangeId; return this; @@ -149,7 +149,7 @@ public void setMetarangeId(String metarangeId) { } - public ImportStatusResp commit(Commit commit) { + public ImportStatus commit(Commit commit) { this.commit = commit; return this; @@ -172,7 +172,7 @@ public void setCommit(Commit commit) { } - public ImportStatusResp error(Error error) { + public ImportStatus error(Error error) { this.error = error; return this; @@ -203,13 +203,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ImportStatusResp importStatusResp = (ImportStatusResp) o; - return Objects.equals(this.completed, importStatusResp.completed) && - Objects.equals(this.updateTime, importStatusResp.updateTime) && - Objects.equals(this.ingestedObjects, importStatusResp.ingestedObjects) && - Objects.equals(this.metarangeId, importStatusResp.metarangeId) && - Objects.equals(this.commit, importStatusResp.commit) && - Objects.equals(this.error, importStatusResp.error); + ImportStatus importStatus = (ImportStatus) o; + return Objects.equals(this.completed, importStatus.completed) && + Objects.equals(this.updateTime, importStatus.updateTime) && + Objects.equals(this.ingestedObjects, importStatus.ingestedObjects) && + Objects.equals(this.metarangeId, importStatus.metarangeId) && + Objects.equals(this.commit, importStatus.commit) && + Objects.equals(this.error, importStatus.error); } @Override @@ -220,7 +220,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new 
StringBuilder(); - sb.append("class ImportStatusResp {\n"); + sb.append("class ImportStatus {\n"); sb.append(" completed: ").append(toIndentedString(completed)).append("\n"); sb.append(" updateTime: ").append(toIndentedString(updateTime)).append("\n"); sb.append(" ingestedObjects: ").append(toIndentedString(ingestedObjects)).append("\n"); diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/IngestRangeCreationResponse.java b/clients/java/src/main/java/io/lakefs/clients/api/model/IngestRangeCreationResponse.java deleted file mode 100644 index eef58e3a41c..00000000000 --- a/clients/java/src/main/java/io/lakefs/clients/api/model/IngestRangeCreationResponse.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * lakeFS API - * lakeFS HTTP API - * - * The version of the OpenAPI document: 0.1.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. - */ - - -package io.lakefs.clients.api.model; - -import java.util.Objects; -import java.util.Arrays; -import com.google.gson.TypeAdapter; -import com.google.gson.annotations.JsonAdapter; -import com.google.gson.annotations.SerializedName; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import io.lakefs.clients.api.model.ImportPagination; -import io.lakefs.clients.api.model.RangeMetadata; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import java.io.IOException; - -/** - * IngestRangeCreationResponse - */ -@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen") -public class IngestRangeCreationResponse { - public static final String SERIALIZED_NAME_RANGE = "range"; - @SerializedName(SERIALIZED_NAME_RANGE) - private RangeMetadata range; - - public static final String SERIALIZED_NAME_PAGINATION = "pagination"; - @SerializedName(SERIALIZED_NAME_PAGINATION) - private ImportPagination pagination; - - - public IngestRangeCreationResponse range(RangeMetadata range) { - - this.range = range; - return this; - } - - /** - * Get range - * @return range - **/ - @javax.annotation.Nullable - @ApiModelProperty(value = "") - - public RangeMetadata getRange() { - return range; - } - - - public void setRange(RangeMetadata range) { - this.range = range; - } - - - public IngestRangeCreationResponse pagination(ImportPagination pagination) { - - this.pagination = pagination; - return this; - } - - /** - * Get pagination - * @return pagination - **/ - @javax.annotation.Nullable - @ApiModelProperty(value = "") - - public ImportPagination getPagination() { - return pagination; - } - - - public void setPagination(ImportPagination pagination) { - this.pagination = pagination; - } - - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - IngestRangeCreationResponse ingestRangeCreationResponse = (IngestRangeCreationResponse) o; - return Objects.equals(this.range, ingestRangeCreationResponse.range) && - Objects.equals(this.pagination, ingestRangeCreationResponse.pagination); - } - - @Override - public int hashCode() { - return Objects.hash(range, pagination); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("class IngestRangeCreationResponse {\n"); - sb.append(" range: ").append(toIndentedString(range)).append("\n"); - sb.append(" pagination: 
").append(toIndentedString(pagination)).append("\n"); - sb.append("}"); - return sb.toString(); - } - - /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). - */ - private String toIndentedString(Object o) { - if (o == null) { - return "null"; - } - return o.toString().replace("\n", "\n "); - } - -} - diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ObjectStats.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ObjectStats.java index 7e2fb43f455..8b4b132b51d 100644 --- a/clients/java/src/main/java/io/lakefs/clients/api/model/ObjectStats.java +++ b/clients/java/src/main/java/io/lakefs/clients/api/model/ObjectStats.java @@ -192,11 +192,11 @@ public ObjectStats physicalAddressExpiry(Long physicalAddressExpiry) { } /** - * If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. + * If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. * @return physicalAddressExpiry **/ @javax.annotation.Nullable - @ApiModelProperty(value = "If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. ") + @ApiModelProperty(value = "If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. ") public Long getPhysicalAddressExpiry() { return physicalAddressExpiry; diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/StageRangeCreation.java b/clients/java/src/main/java/io/lakefs/clients/api/model/StageRangeCreation.java deleted file mode 100644 index 2e52ec0ceeb..00000000000 --- a/clients/java/src/main/java/io/lakefs/clients/api/model/StageRangeCreation.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * lakeFS API - * lakeFS HTTP API - * - * The version of the OpenAPI document: 0.1.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. 
- */ - - -package io.lakefs.clients.api.model; - -import java.util.Objects; -import java.util.Arrays; -import com.google.gson.TypeAdapter; -import com.google.gson.annotations.JsonAdapter; -import com.google.gson.annotations.SerializedName; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import java.io.IOException; - -/** - * StageRangeCreation - */ -@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen") -public class StageRangeCreation { - public static final String SERIALIZED_NAME_FROM_SOURCE_U_R_I = "fromSourceURI"; - @SerializedName(SERIALIZED_NAME_FROM_SOURCE_U_R_I) - private String fromSourceURI; - - public static final String SERIALIZED_NAME_AFTER = "after"; - @SerializedName(SERIALIZED_NAME_AFTER) - private String after; - - public static final String SERIALIZED_NAME_PREPEND = "prepend"; - @SerializedName(SERIALIZED_NAME_PREPEND) - private String prepend; - - public static final String SERIALIZED_NAME_CONTINUATION_TOKEN = "continuation_token"; - @SerializedName(SERIALIZED_NAME_CONTINUATION_TOKEN) - private String continuationToken; - - public static final String SERIALIZED_NAME_STAGING_TOKEN = "staging_token"; - @SerializedName(SERIALIZED_NAME_STAGING_TOKEN) - private String stagingToken; - - - public StageRangeCreation fromSourceURI(String fromSourceURI) { - - this.fromSourceURI = fromSourceURI; - return this; - } - - /** - * The source location of the ingested files. Must match the lakeFS installation blockstore type. - * @return fromSourceURI - **/ - @javax.annotation.Nonnull - @ApiModelProperty(example = "s3://my-bucket/production/collections/", required = true, value = "The source location of the ingested files. Must match the lakeFS installation blockstore type.") - - public String getFromSourceURI() { - return fromSourceURI; - } - - - public void setFromSourceURI(String fromSourceURI) { - this.fromSourceURI = fromSourceURI; - } - - - public StageRangeCreation after(String after) { - - this.after = after; - return this; - } - - /** - * Only objects after this key would be ingested. - * @return after - **/ - @javax.annotation.Nonnull - @ApiModelProperty(example = "production/collections/some/file.parquet", required = true, value = "Only objects after this key would be ingested.") - - public String getAfter() { - return after; - } - - - public void setAfter(String after) { - this.after = after; - } - - - public StageRangeCreation prepend(String prepend) { - - this.prepend = prepend; - return this; - } - - /** - * A prefix to prepend to ingested objects. - * @return prepend - **/ - @javax.annotation.Nonnull - @ApiModelProperty(example = "collections/", required = true, value = "A prefix to prepend to ingested objects.") - - public String getPrepend() { - return prepend; - } - - - public void setPrepend(String prepend) { - this.prepend = prepend; - } - - - public StageRangeCreation continuationToken(String continuationToken) { - - this.continuationToken = continuationToken; - return this; - } - - /** - * Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key. - * @return continuationToken - **/ - @javax.annotation.Nullable - @ApiModelProperty(value = "Opaque. 
Client should pass the continuation_token received from server to continue creation ranges from the same key.") - - public String getContinuationToken() { - return continuationToken; - } - - - public void setContinuationToken(String continuationToken) { - this.continuationToken = continuationToken; - } - - - public StageRangeCreation stagingToken(String stagingToken) { - - this.stagingToken = stagingToken; - return this; - } - - /** - * Opaque. Client should pass staging_token if received from server on previous request - * @return stagingToken - **/ - @javax.annotation.Nullable - @ApiModelProperty(value = "Opaque. Client should pass staging_token if received from server on previous request") - - public String getStagingToken() { - return stagingToken; - } - - - public void setStagingToken(String stagingToken) { - this.stagingToken = stagingToken; - } - - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - StageRangeCreation stageRangeCreation = (StageRangeCreation) o; - return Objects.equals(this.fromSourceURI, stageRangeCreation.fromSourceURI) && - Objects.equals(this.after, stageRangeCreation.after) && - Objects.equals(this.prepend, stageRangeCreation.prepend) && - Objects.equals(this.continuationToken, stageRangeCreation.continuationToken) && - Objects.equals(this.stagingToken, stageRangeCreation.stagingToken); - } - - @Override - public int hashCode() { - return Objects.hash(fromSourceURI, after, prepend, continuationToken, stagingToken); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("class StageRangeCreation {\n"); - sb.append(" fromSourceURI: ").append(toIndentedString(fromSourceURI)).append("\n"); - sb.append(" after: ").append(toIndentedString(after)).append("\n"); - sb.append(" prepend: ").append(toIndentedString(prepend)).append("\n"); - sb.append(" continuationToken: ").append(toIndentedString(continuationToken)).append("\n"); - sb.append(" stagingToken: ").append(toIndentedString(stagingToken)).append("\n"); - sb.append("}"); - return sb.toString(); - } - - /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). 
- */ - private String toIndentedString(Object o) { - if (o == null) { - return "null"; - } - return o.toString().replace("\n", "\n "); - } - -} - diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/StagingLocation.java b/clients/java/src/main/java/io/lakefs/clients/api/model/StagingLocation.java index e2a513ca1ba..1d379ae7541 100644 --- a/clients/java/src/main/java/io/lakefs/clients/api/model/StagingLocation.java +++ b/clients/java/src/main/java/io/lakefs/clients/api/model/StagingLocation.java @@ -101,11 +101,11 @@ public StagingLocation presignedUrl(String presignedUrl) { } /** - * if presign=true is passed in the request, this field will contain a presigned URL to use when uploading + * if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading * @return presignedUrl **/ @javax.annotation.Nullable - @ApiModelProperty(value = "if presign=true is passed in the request, this field will contain a presigned URL to use when uploading") + @ApiModelProperty(value = "if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading") public String getPresignedUrl() { return presignedUrl; @@ -124,11 +124,11 @@ public StagingLocation presignedUrlExpiry(Long presignedUrlExpiry) { } /** - * If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. + * If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. * @return presignedUrlExpiry **/ @javax.annotation.Nullable - @ApiModelProperty(value = "If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. ") + @ApiModelProperty(value = "If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. 
") public Long getPresignedUrlExpiry() { return presignedUrlExpiry; diff --git a/clients/java/src/test/java/io/lakefs/clients/api/ImportApiTest.java b/clients/java/src/test/java/io/lakefs/clients/api/ImportApiTest.java index a820297b909..a25cd6bade0 100644 --- a/clients/java/src/test/java/io/lakefs/clients/api/ImportApiTest.java +++ b/clients/java/src/test/java/io/lakefs/clients/api/ImportApiTest.java @@ -17,11 +17,7 @@ import io.lakefs.clients.api.model.Error; import io.lakefs.clients.api.model.ImportCreation; import io.lakefs.clients.api.model.ImportCreationResponse; -import io.lakefs.clients.api.model.ImportStatusResp; -import io.lakefs.clients.api.model.IngestRangeCreationResponse; -import io.lakefs.clients.api.model.MetaRangeCreation; -import io.lakefs.clients.api.model.MetaRangeCreationResponse; -import io.lakefs.clients.api.model.StageRangeCreation; +import io.lakefs.clients.api.model.ImportStatus; import org.junit.Test; import org.junit.Ignore; @@ -39,22 +35,6 @@ public class ImportApiTest { private final ImportApi api = new ImportApi(); - /** - * create a lakeFS metarange file from the given ranges - * - * - * - * @throws ApiException - * if the Api call fails - */ - @Test - public void createMetaRangeTest() throws ApiException { - String repository = null; - MetaRangeCreation metaRangeCreation = null; - MetaRangeCreationResponse response = api.createMetaRange(repository, metaRangeCreation); - // TODO: test validations - } - /** * cancel ongoing import * @@ -102,23 +82,7 @@ public void importStatusTest() throws ApiException { String repository = null; String branch = null; String id = null; - ImportStatusResp response = api.importStatus(repository, branch, id); - // TODO: test validations - } - - /** - * create a lakeFS range file from the source uri - * - * - * - * @throws ApiException - * if the Api call fails - */ - @Test - public void ingestRangeTest() throws ApiException { - String repository = null; - StageRangeCreation stageRangeCreation = null; - IngestRangeCreationResponse response = api.ingestRange(repository, stageRangeCreation); + ImportStatus response = api.importStatus(repository, branch, id); // TODO: test validations } diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/ImportPaginationTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/ImportPaginationTest.java deleted file mode 100644 index 11bc94b7281..00000000000 --- a/clients/java/src/test/java/io/lakefs/clients/api/model/ImportPaginationTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * lakeFS API - * lakeFS HTTP API - * - * The version of the OpenAPI document: 0.1.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. 
- */ - - -package io.lakefs.clients.api.model; - -import com.google.gson.TypeAdapter; -import com.google.gson.annotations.JsonAdapter; -import com.google.gson.annotations.SerializedName; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import java.io.IOException; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; - - -/** - * Model tests for ImportPagination - */ -public class ImportPaginationTest { - private final ImportPagination model = new ImportPagination(); - - /** - * Model tests for ImportPagination - */ - @Test - public void testImportPagination() { - // TODO: test ImportPagination - } - - /** - * Test the property 'hasMore' - */ - @Test - public void hasMoreTest() { - // TODO: test hasMore - } - - /** - * Test the property 'continuationToken' - */ - @Test - public void continuationTokenTest() { - // TODO: test continuationToken - } - - /** - * Test the property 'lastKey' - */ - @Test - public void lastKeyTest() { - // TODO: test lastKey - } - - /** - * Test the property 'stagingToken' - */ - @Test - public void stagingTokenTest() { - // TODO: test stagingToken - } - -} diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusRespTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusTest.java similarity index 86% rename from clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusRespTest.java rename to clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusTest.java index 5c72f284475..5ba7f313f91 100644 --- a/clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusRespTest.java +++ b/clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusTest.java @@ -30,17 +30,17 @@ /** - * Model tests for ImportStatusResp + * Model tests for ImportStatus */ -public class ImportStatusRespTest { - private final ImportStatusResp model = new ImportStatusResp(); +public class ImportStatusTest { + private final ImportStatus model = new ImportStatus(); /** - * Model tests for ImportStatusResp + * Model tests for ImportStatus */ @Test - public void testImportStatusResp() { - // TODO: test ImportStatusResp + public void testImportStatus() { + // TODO: test ImportStatus } /** diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/IngestRangeCreationResponseTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/IngestRangeCreationResponseTest.java deleted file mode 100644 index 175a9cb5d24..00000000000 --- a/clients/java/src/test/java/io/lakefs/clients/api/model/IngestRangeCreationResponseTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * lakeFS API - * lakeFS HTTP API - * - * The version of the OpenAPI document: 0.1.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. 
- */ - - -package io.lakefs.clients.api.model; - -import com.google.gson.TypeAdapter; -import com.google.gson.annotations.JsonAdapter; -import com.google.gson.annotations.SerializedName; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import io.lakefs.clients.api.model.ImportPagination; -import io.lakefs.clients.api.model.RangeMetadata; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import java.io.IOException; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; - - -/** - * Model tests for IngestRangeCreationResponse - */ -public class IngestRangeCreationResponseTest { - private final IngestRangeCreationResponse model = new IngestRangeCreationResponse(); - - /** - * Model tests for IngestRangeCreationResponse - */ - @Test - public void testIngestRangeCreationResponse() { - // TODO: test IngestRangeCreationResponse - } - - /** - * Test the property 'range' - */ - @Test - public void rangeTest() { - // TODO: test range - } - - /** - * Test the property 'pagination' - */ - @Test - public void paginationTest() { - // TODO: test pagination - } - -} diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/StageRangeCreationTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/StageRangeCreationTest.java deleted file mode 100644 index adad6b988af..00000000000 --- a/clients/java/src/test/java/io/lakefs/clients/api/model/StageRangeCreationTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * lakeFS API - * lakeFS HTTP API - * - * The version of the OpenAPI document: 0.1.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. 
- */ - - -package io.lakefs.clients.api.model; - -import com.google.gson.TypeAdapter; -import com.google.gson.annotations.JsonAdapter; -import com.google.gson.annotations.SerializedName; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import java.io.IOException; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; - - -/** - * Model tests for StageRangeCreation - */ -public class StageRangeCreationTest { - private final StageRangeCreation model = new StageRangeCreation(); - - /** - * Model tests for StageRangeCreation - */ - @Test - public void testStageRangeCreation() { - // TODO: test StageRangeCreation - } - - /** - * Test the property 'fromSourceURI' - */ - @Test - public void fromSourceURITest() { - // TODO: test fromSourceURI - } - - /** - * Test the property 'after' - */ - @Test - public void afterTest() { - // TODO: test after - } - - /** - * Test the property 'prepend' - */ - @Test - public void prependTest() { - // TODO: test prepend - } - - /** - * Test the property 'continuationToken' - */ - @Test - public void continuationTokenTest() { - // TODO: test continuationToken - } - - /** - * Test the property 'stagingToken' - */ - @Test - public void stagingTokenTest() { - // TODO: test stagingToken - } - -} diff --git a/clients/python/.openapi-generator/FILES b/clients/python/.openapi-generator/FILES index 62467335172..44f38171c71 100644 --- a/clients/python/.openapi-generator/FILES +++ b/clients/python/.openapi-generator/FILES @@ -45,9 +45,7 @@ docs/ImportApi.md docs/ImportCreation.md docs/ImportCreationResponse.md docs/ImportLocation.md -docs/ImportPagination.md -docs/ImportStatusResp.md -docs/IngestRangeCreationResponse.md +docs/ImportStatus.md docs/InlineObject.md docs/InlineObject1.md docs/InternalApi.md @@ -90,7 +88,6 @@ docs/RetentionApi.md docs/RevertCreation.md docs/Setup.md docs/SetupState.md -docs/StageRangeCreation.md docs/StagingApi.md docs/StagingLocation.md docs/StagingMetadata.md @@ -167,9 +164,7 @@ lakefs_client/model/hook_run_list.py lakefs_client/model/import_creation.py lakefs_client/model/import_creation_response.py lakefs_client/model/import_location.py -lakefs_client/model/import_pagination.py -lakefs_client/model/import_status_resp.py -lakefs_client/model/ingest_range_creation_response.py +lakefs_client/model/import_status.py lakefs_client/model/inline_object.py lakefs_client/model/inline_object1.py lakefs_client/model/login_config.py @@ -206,7 +201,6 @@ lakefs_client/model/reset_creation.py lakefs_client/model/revert_creation.py lakefs_client/model/setup.py lakefs_client/model/setup_state.py -lakefs_client/model/stage_range_creation.py lakefs_client/model/staging_location.py lakefs_client/model/staging_metadata.py lakefs_client/model/statement.py @@ -273,9 +267,7 @@ test/test_import_api.py test/test_import_creation.py test/test_import_creation_response.py test/test_import_location.py -test/test_import_pagination.py -test/test_import_status_resp.py -test/test_ingest_range_creation_response.py +test/test_import_status.py test/test_inline_object.py test/test_inline_object1.py test/test_internal_api.py @@ -318,7 +310,6 @@ test/test_retention_api.py test/test_revert_creation.py test/test_setup.py test/test_setup_state.py -test/test_stage_range_creation.py test/test_staging_api.py test/test_staging_location.py test/test_staging_metadata.py diff --git a/clients/python/README.md b/clients/python/README.md index 
76d7fb968eb..f3f42893be4 100644 --- a/clients/python/README.md +++ b/clients/python/README.md @@ -167,11 +167,9 @@ Class | Method | HTTP request | Description *ExperimentalApi* | [**get_otf_diffs**](docs/ExperimentalApi.md#get_otf_diffs) | **GET** /otf/diffs | get the available Open Table Format diffs *ExperimentalApi* | [**otf_diff**](docs/ExperimentalApi.md#otf_diff) | **GET** /repositories/{repository}/otf/refs/{left_ref}/diff/{right_ref} | perform otf diff *HealthCheckApi* | [**health_check**](docs/HealthCheckApi.md#health_check) | **GET** /healthcheck | -*ImportApi* | [**create_meta_range**](docs/ImportApi.md#create_meta_range) | **POST** /repositories/{repository}/branches/metaranges | create a lakeFS metarange file from the given ranges *ImportApi* | [**import_cancel**](docs/ImportApi.md#import_cancel) | **DELETE** /repositories/{repository}/branches/{branch}/import | cancel ongoing import *ImportApi* | [**import_start**](docs/ImportApi.md#import_start) | **POST** /repositories/{repository}/branches/{branch}/import | import data from object store *ImportApi* | [**import_status**](docs/ImportApi.md#import_status) | **GET** /repositories/{repository}/branches/{branch}/import | get import status -*ImportApi* | [**ingest_range**](docs/ImportApi.md#ingest_range) | **POST** /repositories/{repository}/branches/ranges | create a lakeFS range file from the source uri *InternalApi* | [**create_branch_protection_rule_preflight**](docs/InternalApi.md#create_branch_protection_rule_preflight) | **GET** /repositories/{repository}/branch_protection/set_allowed | *InternalApi* | [**get_auth_capabilities**](docs/InternalApi.md#get_auth_capabilities) | **GET** /auth/capabilities | list authentication capabilities supported *InternalApi* | [**get_setup_state**](docs/InternalApi.md#get_setup_state) | **GET** /setup_lakefs | check if the lakeFS installation is already set up @@ -258,9 +256,7 @@ Class | Method | HTTP request | Description - [ImportCreation](docs/ImportCreation.md) - [ImportCreationResponse](docs/ImportCreationResponse.md) - [ImportLocation](docs/ImportLocation.md) - - [ImportPagination](docs/ImportPagination.md) - - [ImportStatusResp](docs/ImportStatusResp.md) - - [IngestRangeCreationResponse](docs/IngestRangeCreationResponse.md) + - [ImportStatus](docs/ImportStatus.md) - [InlineObject](docs/InlineObject.md) - [InlineObject1](docs/InlineObject1.md) - [LoginConfig](docs/LoginConfig.md) @@ -297,7 +293,6 @@ Class | Method | HTTP request | Description - [RevertCreation](docs/RevertCreation.md) - [Setup](docs/Setup.md) - [SetupState](docs/SetupState.md) - - [StageRangeCreation](docs/StageRangeCreation.md) - [StagingLocation](docs/StagingLocation.md) - [StagingMetadata](docs/StagingMetadata.md) - [Statement](docs/Statement.md) diff --git a/clients/python/docs/AuthApi.md b/clients/python/docs/AuthApi.md index 9502b89365e..713d08444f3 100644 --- a/clients/python/docs/AuthApi.md +++ b/clients/python/docs/AuthApi.md @@ -2304,7 +2304,7 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| -**200** | group memeber list | - | +**200** | group member list | - | **401** | Unauthorized | - | **0** | Internal Server Error | - | diff --git a/clients/python/docs/ImportApi.md b/clients/python/docs/ImportApi.md index 2816366428b..a14c88227af 100644 --- a/clients/python/docs/ImportApi.md +++ b/clients/python/docs/ImportApi.md @@ -4,135 +4,11 @@ All URIs are relative to *http://localhost/api/v1* Method | HTTP request | Description 
------------- | ------------- | ------------- -[**create_meta_range**](ImportApi.md#create_meta_range) | **POST** /repositories/{repository}/branches/metaranges | create a lakeFS metarange file from the given ranges [**import_cancel**](ImportApi.md#import_cancel) | **DELETE** /repositories/{repository}/branches/{branch}/import | cancel ongoing import [**import_start**](ImportApi.md#import_start) | **POST** /repositories/{repository}/branches/{branch}/import | import data from object store [**import_status**](ImportApi.md#import_status) | **GET** /repositories/{repository}/branches/{branch}/import | get import status -[**ingest_range**](ImportApi.md#ingest_range) | **POST** /repositories/{repository}/branches/ranges | create a lakeFS range file from the source uri -# **create_meta_range** -> MetaRangeCreationResponse create_meta_range(repository, meta_range_creation) - -create a lakeFS metarange file from the given ranges - -### Example - -* Basic Authentication (basic_auth): -* Api Key Authentication (cookie_auth): -* Bearer (JWT) Authentication (jwt_token): -* Api Key Authentication (oidc_auth): -* Api Key Authentication (saml_auth): - -```python -import time -import lakefs_client -from lakefs_client.api import import_api -from lakefs_client.model.meta_range_creation import MetaRangeCreation -from lakefs_client.model.meta_range_creation_response import MetaRangeCreationResponse -from lakefs_client.model.error import Error -from pprint import pprint -# Defining the host is optional and defaults to http://localhost/api/v1 -# See configuration.py for a list of all supported configuration parameters. -configuration = lakefs_client.Configuration( - host = "http://localhost/api/v1" -) - -# The client must configure the authentication and authorization parameters -# in accordance with the API server security policy. -# Examples for each auth method are provided below, use the example that -# satisfies your auth use case. - -# Configure HTTP basic authorization: basic_auth -configuration = lakefs_client.Configuration( - username = 'YOUR_USERNAME', - password = 'YOUR_PASSWORD' -) - -# Configure API key authorization: cookie_auth -configuration.api_key['cookie_auth'] = 'YOUR_API_KEY' - -# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['cookie_auth'] = 'Bearer' - -# Configure Bearer authorization (JWT): jwt_token -configuration = lakefs_client.Configuration( - access_token = 'YOUR_BEARER_TOKEN' -) - -# Configure API key authorization: oidc_auth -configuration.api_key['oidc_auth'] = 'YOUR_API_KEY' - -# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['oidc_auth'] = 'Bearer' - -# Configure API key authorization: saml_auth -configuration.api_key['saml_auth'] = 'YOUR_API_KEY' - -# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed -# configuration.api_key_prefix['saml_auth'] = 'Bearer' - -# Enter a context with an instance of the API client -with lakefs_client.ApiClient(configuration) as api_client: - # Create an instance of the API class - api_instance = import_api.ImportApi(api_client) - repository = "repository_example" # str | - meta_range_creation = MetaRangeCreation( - ranges=[ - RangeMetadata( - id="480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c", - min_key="production/collections/some/file_1.parquet", - max_key="production/collections/some/file_8229.parquet", - count=1, - estimated_size=1, - ), - ], - ) # MetaRangeCreation | - - # example passing only required values which don't have defaults set - try: - # create a lakeFS metarange file from the given ranges - api_response = api_instance.create_meta_range(repository, meta_range_creation) - pprint(api_response) - except lakefs_client.ApiException as e: - print("Exception when calling ImportApi->create_meta_range: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **repository** | **str**| | - **meta_range_creation** | [**MetaRangeCreation**](MetaRangeCreation.md)| | - -### Return type - -[**MetaRangeCreationResponse**](MetaRangeCreationResponse.md) - -### Authorization - -[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth) - -### HTTP request headers - - - **Content-Type**: application/json - - **Accept**: application/json - - -### HTTP response details - -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**201** | metarange metadata | - | -**400** | Validation Error | - | -**401** | Unauthorized | - | -**403** | Forbidden | - | -**404** | Resource Not Found | - | -**0** | Internal Server Error | - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - # **import_cancel** > import_cancel(repository, branch, id) @@ -373,7 +249,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **import_status** -> ImportStatusResp import_status(repository, branch, id) +> ImportStatus import_status(repository, branch, id) get import status @@ -389,7 +265,7 @@ get import status import time import lakefs_client from lakefs_client.api import import_api -from lakefs_client.model.import_status_resp import ImportStatusResp +from lakefs_client.model.import_status import ImportStatus from lakefs_client.model.error import Error from pprint import pprint # Defining the host is optional and defaults to http://localhost/api/v1 @@ -460,7 +336,7 @@ Name | Type | Description | Notes ### Return type -[**ImportStatusResp**](ImportStatusResp.md) +[**ImportStatus**](ImportStatus.md) ### Authorization @@ -483,120 +359,3 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) -# **ingest_range** -> IngestRangeCreationResponse ingest_range(repository, stage_range_creation) - -create a lakeFS range file from the source uri - 
-### Example - -* Basic Authentication (basic_auth): -* Api Key Authentication (cookie_auth): -* Bearer (JWT) Authentication (jwt_token): -* Api Key Authentication (oidc_auth): -* Api Key Authentication (saml_auth): - -```python -import time -import lakefs_client -from lakefs_client.api import import_api -from lakefs_client.model.ingest_range_creation_response import IngestRangeCreationResponse -from lakefs_client.model.stage_range_creation import StageRangeCreation -from lakefs_client.model.error import Error -from pprint import pprint -# Defining the host is optional and defaults to http://localhost/api/v1 -# See configuration.py for a list of all supported configuration parameters. -configuration = lakefs_client.Configuration( - host = "http://localhost/api/v1" -) - -# The client must configure the authentication and authorization parameters -# in accordance with the API server security policy. -# Examples for each auth method are provided below, use the example that -# satisfies your auth use case. - -# Configure HTTP basic authorization: basic_auth -configuration = lakefs_client.Configuration( - username = 'YOUR_USERNAME', - password = 'YOUR_PASSWORD' -) - -# Configure API key authorization: cookie_auth -configuration.api_key['cookie_auth'] = 'YOUR_API_KEY' - -# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['cookie_auth'] = 'Bearer' - -# Configure Bearer authorization (JWT): jwt_token -configuration = lakefs_client.Configuration( - access_token = 'YOUR_BEARER_TOKEN' -) - -# Configure API key authorization: oidc_auth -configuration.api_key['oidc_auth'] = 'YOUR_API_KEY' - -# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['oidc_auth'] = 'Bearer' - -# Configure API key authorization: saml_auth -configuration.api_key['saml_auth'] = 'YOUR_API_KEY' - -# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed -# configuration.api_key_prefix['saml_auth'] = 'Bearer' - -# Enter a context with an instance of the API client -with lakefs_client.ApiClient(configuration) as api_client: - # Create an instance of the API class - api_instance = import_api.ImportApi(api_client) - repository = "repository_example" # str | - stage_range_creation = StageRangeCreation( - from_source_uri="s3://my-bucket/production/collections/", - after="production/collections/some/file.parquet", - prepend="collections/", - continuation_token="continuation_token_example", - staging_token="staging_token_example", - ) # StageRangeCreation | - - # example passing only required values which don't have defaults set - try: - # create a lakeFS range file from the source uri - api_response = api_instance.ingest_range(repository, stage_range_creation) - pprint(api_response) - except lakefs_client.ApiException as e: - print("Exception when calling ImportApi->ingest_range: %s\n" % e) -``` - - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **repository** | **str**| | - **stage_range_creation** | [**StageRangeCreation**](StageRangeCreation.md)| | - -### Return type - -[**IngestRangeCreationResponse**](IngestRangeCreationResponse.md) - -### Authorization - -[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth) - -### HTTP request headers - - - **Content-Type**: application/json - - **Accept**: application/json - - -### HTTP response details - -| Status code | Description | Response headers | -|-------------|-------------|------------------| -**201** | range metadata | - | -**400** | Validation Error | - | -**401** | Unauthorized | - | -**404** | Resource Not Found | - | -**0** | Internal Server Error | - | - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - diff --git a/clients/python/docs/ImportLocation.md b/clients/python/docs/ImportLocation.md index 4825cd041ac..96b789a5955 100644 --- a/clients/python/docs/ImportLocation.md +++ b/clients/python/docs/ImportLocation.md @@ -5,7 +5,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **type** | **str** | Path type, can either be 'common_prefix' or 'object' | -**path** | **str** | A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. | +**path** | **str** | A source location to import path or to a single object. Must match the lakeFS installation blockstore type. | **destination** | **str** | Destination for the imported objects on the branch | **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] diff --git a/clients/python/docs/ImportPagination.md b/clients/python/docs/ImportPagination.md deleted file mode 100644 index 9522ca6abaa..00000000000 --- a/clients/python/docs/ImportPagination.md +++ /dev/null @@ -1,15 +0,0 @@ -# ImportPagination - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**has_more** | **bool** | More keys to be ingested. | -**last_key** | **str** | Last object store key that was ingested. 
| -**continuation_token** | **str** | Opaque. Token used to import the next range. | [optional] -**staging_token** | **str** | Staging token for skipped objects during ingest | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/clients/python/docs/ImportStatusResp.md b/clients/python/docs/ImportStatus.md similarity index 97% rename from clients/python/docs/ImportStatusResp.md rename to clients/python/docs/ImportStatus.md index 2679fefd2b3..6d4daaa15e2 100644 --- a/clients/python/docs/ImportStatusResp.md +++ b/clients/python/docs/ImportStatus.md @@ -1,4 +1,4 @@ -# ImportStatusResp +# ImportStatus ## Properties diff --git a/clients/python/docs/IngestRangeCreationResponse.md b/clients/python/docs/IngestRangeCreationResponse.md deleted file mode 100644 index 55637f92b4e..00000000000 --- a/clients/python/docs/IngestRangeCreationResponse.md +++ /dev/null @@ -1,13 +0,0 @@ -# IngestRangeCreationResponse - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**range** | [**RangeMetadata**](RangeMetadata.md) | | [optional] -**pagination** | [**ImportPagination**](ImportPagination.md) | | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/clients/python/docs/ObjectStats.md b/clients/python/docs/ObjectStats.md index 23733d89b21..d5be770ca89 100644 --- a/clients/python/docs/ObjectStats.md +++ b/clients/python/docs/ObjectStats.md @@ -9,7 +9,7 @@ Name | Type | Description | Notes **physical_address** | **str** | The location of the object on the underlying object store. Formatted as a native URI with the object store type as scheme (\"s3://...\", \"gs://...\", etc.) Or, in the case of presign=true, will be an HTTP URL to be consumed via regular HTTP GET | **checksum** | **str** | | **mtime** | **int** | Unix Epoch in seconds | -**physical_address_expiry** | **int** | If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional] +**physical_address_expiry** | **int** | If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. 
| [optional] **size_bytes** | **int** | | [optional] **metadata** | [**ObjectUserMetadata**](ObjectUserMetadata.md) | | [optional] **content_type** | **str** | Object media type | [optional] diff --git a/clients/python/docs/StageRangeCreation.md b/clients/python/docs/StageRangeCreation.md deleted file mode 100644 index c0b4173d6f4..00000000000 --- a/clients/python/docs/StageRangeCreation.md +++ /dev/null @@ -1,16 +0,0 @@ -# StageRangeCreation - - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**from_source_uri** | **str** | The source location of the ingested files. Must match the lakeFS installation blockstore type. | -**after** | **str** | Only objects after this key would be ingested. | -**prepend** | **str** | A prefix to prepend to ingested objects. | -**continuation_token** | **str** | Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key. | [optional] -**staging_token** | **str** | Opaque. Client should pass staging_token if received from server on previous request | [optional] -**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/clients/python/docs/StagingLocation.md b/clients/python/docs/StagingLocation.md index 35e4712f55d..1e6dcdf1190 100644 --- a/clients/python/docs/StagingLocation.md +++ b/clients/python/docs/StagingLocation.md @@ -7,8 +7,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **token** | **str** | opaque staging token to use to link uploaded object | **physical_address** | **str** | | [optional] -**presigned_url** | **str, none_type** | if presign=true is passed in the request, this field will contain a presigned URL to use when uploading | [optional] -**presigned_url_expiry** | **int** | If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional] +**presigned_url** | **str, none_type** | if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading | [optional] +**presigned_url_expiry** | **int** | If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. 
| [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/clients/python/lakefs_client/api/import_api.py b/clients/python/lakefs_client/api/import_api.py index edc77687799..5bc04b88c76 100644 --- a/clients/python/lakefs_client/api/import_api.py +++ b/clients/python/lakefs_client/api/import_api.py @@ -25,11 +25,7 @@ from lakefs_client.model.error import Error from lakefs_client.model.import_creation import ImportCreation from lakefs_client.model.import_creation_response import ImportCreationResponse -from lakefs_client.model.import_status_resp import ImportStatusResp -from lakefs_client.model.ingest_range_creation_response import IngestRangeCreationResponse -from lakefs_client.model.meta_range_creation import MetaRangeCreation -from lakefs_client.model.meta_range_creation_response import MetaRangeCreationResponse -from lakefs_client.model.stage_range_creation import StageRangeCreation +from lakefs_client.model.import_status import ImportStatus class ImportApi(object): @@ -43,68 +39,6 @@ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client - self.create_meta_range_endpoint = _Endpoint( - settings={ - 'response_type': (MetaRangeCreationResponse,), - 'auth': [ - 'basic_auth', - 'cookie_auth', - 'jwt_token', - 'oidc_auth', - 'saml_auth' - ], - 'endpoint_path': '/repositories/{repository}/branches/metaranges', - 'operation_id': 'create_meta_range', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'repository', - 'meta_range_creation', - ], - 'required': [ - 'repository', - 'meta_range_creation', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'repository': - (str,), - 'meta_range_creation': - (MetaRangeCreation,), - }, - 'attribute_map': { - 'repository': 'repository', - }, - 'location_map': { - 'repository': 'path', - 'meta_range_creation': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) self.import_cancel_endpoint = _Endpoint( settings={ 'response_type': None, @@ -242,7 +176,7 @@ def __init__(self, api_client=None): ) self.import_status_endpoint = _Endpoint( settings={ - 'response_type': (ImportStatusResp,), + 'response_type': (ImportStatus,), 'auth': [ 'basic_auth', 'cookie_auth', @@ -307,137 +241,6 @@ def __init__(self, api_client=None): }, api_client=api_client ) - self.ingest_range_endpoint = _Endpoint( - settings={ - 'response_type': (IngestRangeCreationResponse,), - 'auth': [ - 'basic_auth', - 'cookie_auth', - 'jwt_token', - 'oidc_auth', - 'saml_auth' - ], - 'endpoint_path': '/repositories/{repository}/branches/ranges', - 'operation_id': 'ingest_range', - 'http_method': 'POST', - 'servers': None, - }, - params_map={ - 'all': [ - 'repository', - 'stage_range_creation', - ], - 'required': [ - 'repository', - 'stage_range_creation', - ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, - root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'repository': - (str,), - 'stage_range_creation': - (StageRangeCreation,), - }, - 
'attribute_map': { - 'repository': 'repository', - }, - 'location_map': { - 'repository': 'path', - 'stage_range_creation': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] - }, - api_client=api_client - ) - - def create_meta_range( - self, - repository, - meta_range_creation, - **kwargs - ): - """create a lakeFS metarange file from the given ranges # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_meta_range(repository, meta_range_creation, async_req=True) - >>> result = thread.get() - - Args: - repository (str): - meta_range_creation (MetaRangeCreation): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - MetaRangeCreationResponse - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['repository'] = \ - repository - kwargs['meta_range_creation'] = \ - meta_range_creation - return self.create_meta_range_endpoint.call_with_http_info(**kwargs) def import_cancel( self, @@ -627,7 +430,7 @@ def import_status( async_req (bool): execute request asynchronously Returns: - ImportStatusResp + ImportStatus If the method is called asynchronously, returns the request thread. """ @@ -658,72 +461,3 @@ def import_status( id return self.import_status_endpoint.call_with_http_info(**kwargs) - def ingest_range( - self, - repository, - stage_range_creation, - **kwargs - ): - """create a lakeFS range file from the source uri # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.ingest_range(repository, stage_range_creation, async_req=True) - >>> result = thread.get() - - Args: - repository (str): - stage_range_creation (StageRangeCreation): - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. 
- Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IngestRangeCreationResponse - If the method is called asynchronously, returns the request - thread. - """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['repository'] = \ - repository - kwargs['stage_range_creation'] = \ - stage_range_creation - return self.ingest_range_endpoint.call_with_http_info(**kwargs) - diff --git a/clients/python/lakefs_client/model/import_location.py b/clients/python/lakefs_client/model/import_location.py index 27198dfbe4e..2aa881a8d38 100644 --- a/clients/python/lakefs_client/model/import_location.py +++ b/clients/python/lakefs_client/model/import_location.py @@ -114,7 +114,7 @@ def _from_openapi_data(cls, type, path, destination, *args, **kwargs): # noqa: Args: type (str): Path type, can either be 'common_prefix' or 'object' - path (str): A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. + path (str): A source location to import path or to a single object. Must match the lakeFS installation blockstore type. destination (str): Destination for the imported objects on the branch Keyword Args: @@ -203,7 +203,7 @@ def __init__(self, type, path, destination, *args, **kwargs): # noqa: E501 Args: type (str): Path type, can either be 'common_prefix' or 'object' - path (str): A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. + path (str): A source location to import path or to a single object. Must match the lakeFS installation blockstore type. 
destination (str): Destination for the imported objects on the branch Keyword Args: diff --git a/clients/python/lakefs_client/model/import_pagination.py b/clients/python/lakefs_client/model/import_pagination.py deleted file mode 100644 index 18deae1654d..00000000000 --- a/clients/python/lakefs_client/model/import_pagination.py +++ /dev/null @@ -1,276 +0,0 @@ -""" - lakeFS API - - lakeFS HTTP API # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Contact: services@treeverse.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from lakefs_client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from lakefs_client.exceptions import ApiAttributeError - - - -class ImportPagination(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'has_more': (bool,), # noqa: E501 - 'last_key': (str,), # noqa: E501 - 'continuation_token': (str,), # noqa: E501 - 'staging_token': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'has_more': 'has_more', # noqa: E501 - 'last_key': 'last_key', # noqa: E501 - 'continuation_token': 'continuation_token', # noqa: E501 - 'staging_token': 'staging_token', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, has_more, last_key, *args, **kwargs): # noqa: E501 - """ImportPagination - a model defined in OpenAPI - - Args: - has_more (bool): More keys to be ingested. - last_key (str): Last object store key that was ingested. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - continuation_token (str): Opaque. Token used to import the next range.. [optional] # noqa: E501 - staging_token (str): Staging token for skipped objects during ingest. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.has_more = has_more - self.last_key = last_key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, has_more, last_key, *args, **kwargs): # noqa: E501 - """ImportPagination - a model defined in OpenAPI - - Args: - has_more (bool): More keys to be ingested. - last_key (str): Last object store key that was ingested. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - continuation_token (str): Opaque. Token used to import the next range.. [optional] # noqa: E501 - staging_token (str): Staging token for skipped objects during ingest. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.has_more = has_more - self.last_key = last_key - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/clients/python/lakefs_client/model/import_status_resp.py b/clients/python/lakefs_client/model/import_status.py similarity index 98% rename from clients/python/lakefs_client/model/import_status_resp.py rename to clients/python/lakefs_client/model/import_status.py index 4643fd64228..a07f1b6d515 100644 --- a/clients/python/lakefs_client/model/import_status_resp.py +++ b/clients/python/lakefs_client/model/import_status.py @@ -37,7 +37,7 @@ def lazy_import(): globals()['Error'] = Error -class ImportStatusResp(ModelNormal): +class ImportStatus(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -120,7 +120,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, completed, update_time, *args, **kwargs): # noqa: E501 - """ImportStatusResp - a model defined in OpenAPI + """ImportStatus - a model defined in OpenAPI Args: completed (bool): @@ -211,7 +211,7 @@ def _from_openapi_data(cls, completed, update_time, *args, **kwargs): # noqa: E @convert_js_args_to_python_args def __init__(self, completed, update_time, *args, **kwargs): # noqa: E501 - """ImportStatusResp - a model defined in OpenAPI + """ImportStatus - a model defined in OpenAPI Args: completed (bool): diff --git a/clients/python/lakefs_client/model/ingest_range_creation_response.py b/clients/python/lakefs_client/model/ingest_range_creation_response.py deleted file mode 100644 index 8bdf19921ec..00000000000 --- a/clients/python/lakefs_client/model/ingest_range_creation_response.py +++ /dev/null @@ -1,268 +0,0 @@ -""" - lakeFS API - - lakeFS HTTP API # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Contact: services@treeverse.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from lakefs_client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from lakefs_client.exceptions import ApiAttributeError - - -def lazy_import(): - from lakefs_client.model.import_pagination import ImportPagination - from lakefs_client.model.range_metadata import RangeMetadata - globals()['ImportPagination'] = ImportPagination - globals()['RangeMetadata'] = RangeMetadata - - -class IngestRangeCreationResponse(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - 'range': (RangeMetadata,), # noqa: E501 - 'pagination': (ImportPagination,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'range': 'range', # noqa: E501 - 'pagination': 'pagination', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IngestRangeCreationResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - range (RangeMetadata): [optional] # noqa: E501 - pagination (ImportPagination): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IngestRangeCreationResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - range (RangeMetadata): [optional] # noqa: E501 - pagination (ImportPagination): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/clients/python/lakefs_client/model/object_stats.py b/clients/python/lakefs_client/model/object_stats.py index acf36fb7dbe..109bc71c5f6 100644 --- a/clients/python/lakefs_client/model/object_stats.py +++ b/clients/python/lakefs_client/model/object_stats.py @@ -168,7 +168,7 @@ def _from_openapi_data(cls, path, path_type, physical_address, checksum, mtime, Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - physical_address_expiry (int): If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501 + physical_address_expiry (int): If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501 size_bytes (int): [optional] # noqa: E501 metadata (ObjectUserMetadata): [optional] # noqa: E501 content_type (str): Object media type. [optional] # noqa: E501 @@ -265,7 +265,7 @@ def __init__(self, path, path_type, physical_address, checksum, mtime, *args, ** Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - physical_address_expiry (int): If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501 + physical_address_expiry (int): If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501 size_bytes (int): [optional] # noqa: E501 metadata (ObjectUserMetadata): [optional] # noqa: E501 content_type (str): Object media type. [optional] # noqa: E501 diff --git a/clients/python/lakefs_client/model/stage_range_creation.py b/clients/python/lakefs_client/model/stage_range_creation.py deleted file mode 100644 index 7e639a55536..00000000000 --- a/clients/python/lakefs_client/model/stage_range_creation.py +++ /dev/null @@ -1,282 +0,0 @@ -""" - lakeFS API - - lakeFS HTTP API # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Contact: services@treeverse.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from lakefs_client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from lakefs_client.exceptions import ApiAttributeError - - - -class StageRangeCreation(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - } - - validations = { - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - 'from_source_uri': (str,), # noqa: E501 - 'after': (str,), # noqa: E501 - 'prepend': (str,), # noqa: E501 - 'continuation_token': (str,), # noqa: E501 - 'staging_token': (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - - attribute_map = { - 'from_source_uri': 'fromSourceURI', # noqa: E501 - 'after': 'after', # noqa: E501 - 'prepend': 'prepend', # noqa: E501 - 'continuation_token': 'continuation_token', # noqa: E501 - 'staging_token': 'staging_token', # noqa: E501 - } - - read_only_vars = { - } - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, from_source_uri, after, prepend, *args, **kwargs): # noqa: E501 - """StageRangeCreation - a model defined in OpenAPI - - Args: - from_source_uri (str): The source location of the ingested files. Must match the lakeFS installation blockstore type. - after (str): Only objects after this key would be ingested. - prepend (str): A prefix to prepend to ingested objects. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - continuation_token (str): Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key.. [optional] # noqa: E501 - staging_token (str): Opaque. Client should pass staging_token if received from server on previous request. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.from_source_uri = from_source_uri - self.after = after - self.prepend = prepend - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) - - @convert_js_args_to_python_args - def __init__(self, from_source_uri, after, prepend, *args, **kwargs): # noqa: E501 - """StageRangeCreation - a model defined in OpenAPI - - Args: - from_source_uri (str): The source location of the ingested files. Must match the lakeFS installation blockstore type. - after (str): Only objects after this key would be ingested. - prepend (str): A prefix to prepend to ingested objects. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - continuation_token (str): Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key.. [optional] # noqa: E501 - staging_token (str): Opaque. Client should pass staging_token if received from server on previous request. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.from_source_uri = from_source_uri - self.after = after - self.prepend = prepend - for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") diff --git a/clients/python/lakefs_client/model/staging_location.py b/clients/python/lakefs_client/model/staging_location.py index b9ee2d7002a..d56b5cfc04a 100644 --- a/clients/python/lakefs_client/model/staging_location.py +++ b/clients/python/lakefs_client/model/staging_location.py @@ -145,8 +145,8 @@ def _from_openapi_data(cls, token, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) physical_address (str): [optional] # noqa: E501 - presigned_url (str, none_type): if presign=true is passed in the request, this field will contain a presigned URL to use when uploading. [optional] # noqa: E501 - presigned_url_expiry (int): If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501 + presigned_url (str, none_type): if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading. [optional] # noqa: E501 + presigned_url_expiry (int): If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. . 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -233,8 +233,8 @@ def __init__(self, token, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) physical_address (str): [optional] # noqa: E501 - presigned_url (str, none_type): if presign=true is passed in the request, this field will contain a presigned URL to use when uploading. [optional] # noqa: E501 - presigned_url_expiry (int): If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501 + presigned_url (str, none_type): if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading. [optional] # noqa: E501 + presigned_url_expiry (int): If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/clients/python/lakefs_client/models/__init__.py b/clients/python/lakefs_client/models/__init__.py index cf2a403a395..30a1c3059cb 100644 --- a/clients/python/lakefs_client/models/__init__.py +++ b/clients/python/lakefs_client/models/__init__.py @@ -45,9 +45,7 @@ from lakefs_client.model.import_creation import ImportCreation from lakefs_client.model.import_creation_response import ImportCreationResponse from lakefs_client.model.import_location import ImportLocation -from lakefs_client.model.import_pagination import ImportPagination -from lakefs_client.model.import_status_resp import ImportStatusResp -from lakefs_client.model.ingest_range_creation_response import IngestRangeCreationResponse +from lakefs_client.model.import_status import ImportStatus from lakefs_client.model.inline_object import InlineObject from lakefs_client.model.inline_object1 import InlineObject1 from lakefs_client.model.login_config import LoginConfig @@ -84,7 +82,6 @@ from lakefs_client.model.revert_creation import RevertCreation from lakefs_client.model.setup import Setup from lakefs_client.model.setup_state import SetupState -from lakefs_client.model.stage_range_creation import StageRangeCreation from lakefs_client.model.staging_location import StagingLocation from lakefs_client.model.staging_metadata import StagingMetadata from lakefs_client.model.statement import Statement diff --git a/clients/python/test/test_import_api.py b/clients/python/test/test_import_api.py index cef413386eb..079eed8c1b6 100644 --- a/clients/python/test/test_import_api.py +++ b/clients/python/test/test_import_api.py @@ -24,13 +24,6 @@ def setUp(self): def tearDown(self): pass - def test_create_meta_range(self): - """Test case for create_meta_range - - create a lakeFS metarange file from the given ranges # noqa: E501 - """ - pass - def test_import_cancel(self): """Test case for import_cancel @@ -52,13 +45,6 @@ def test_import_status(self): """ pass - def test_ingest_range(self): - """Test case for ingest_range - - create a lakeFS range file from the source uri # noqa: E501 - """ - pass - if __name__ == '__main__': unittest.main() diff --git a/clients/python/test/test_import_pagination.py b/clients/python/test/test_import_pagination.py deleted file mode 100644 index f65f829c102..00000000000 --- 
a/clients/python/test/test_import_pagination.py +++ /dev/null @@ -1,36 +0,0 @@ -""" - lakeFS API - - lakeFS HTTP API # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Contact: services@treeverse.io - Generated by: https://openapi-generator.tech -""" - - -import sys -import unittest - -import lakefs_client -from lakefs_client.model.import_pagination import ImportPagination - - -class TestImportPagination(unittest.TestCase): - """ImportPagination unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testImportPagination(self): - """Test ImportPagination""" - # FIXME: construct object with mandatory attributes with example values - # model = ImportPagination() # noqa: E501 - pass - - -if __name__ == '__main__': - unittest.main() diff --git a/clients/python/test/test_import_status_resp.py b/clients/python/test/test_import_status.py similarity index 68% rename from clients/python/test/test_import_status_resp.py rename to clients/python/test/test_import_status.py index 2cc4dd8b245..c7a593b0ecc 100644 --- a/clients/python/test/test_import_status_resp.py +++ b/clients/python/test/test_import_status.py @@ -17,11 +17,11 @@ from lakefs_client.model.error import Error globals()['Commit'] = Commit globals()['Error'] = Error -from lakefs_client.model.import_status_resp import ImportStatusResp +from lakefs_client.model.import_status import ImportStatus -class TestImportStatusResp(unittest.TestCase): - """ImportStatusResp unit test stubs""" +class TestImportStatus(unittest.TestCase): + """ImportStatus unit test stubs""" def setUp(self): pass @@ -29,10 +29,10 @@ def setUp(self): def tearDown(self): pass - def testImportStatusResp(self): - """Test ImportStatusResp""" + def testImportStatus(self): + """Test ImportStatus""" # FIXME: construct object with mandatory attributes with example values - # model = ImportStatusResp() # noqa: E501 + # model = ImportStatus() # noqa: E501 pass diff --git a/clients/python/test/test_ingest_range_creation_response.py b/clients/python/test/test_ingest_range_creation_response.py deleted file mode 100644 index 6d3fb9326ae..00000000000 --- a/clients/python/test/test_ingest_range_creation_response.py +++ /dev/null @@ -1,40 +0,0 @@ -""" - lakeFS API - - lakeFS HTTP API # noqa: E501 - - The version of the OpenAPI document: 0.1.0 - Contact: services@treeverse.io - Generated by: https://openapi-generator.tech -""" - - -import sys -import unittest - -import lakefs_client -from lakefs_client.model.import_pagination import ImportPagination -from lakefs_client.model.range_metadata import RangeMetadata -globals()['ImportPagination'] = ImportPagination -globals()['RangeMetadata'] = RangeMetadata -from lakefs_client.model.ingest_range_creation_response import IngestRangeCreationResponse - - -class TestIngestRangeCreationResponse(unittest.TestCase): - """IngestRangeCreationResponse unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testIngestRangeCreationResponse(self): - """Test IngestRangeCreationResponse""" - # FIXME: construct object with mandatory attributes with example values - # model = IngestRangeCreationResponse() # noqa: E501 - pass - - -if __name__ == '__main__': - unittest.main() diff --git a/clients/python/test/test_stage_range_creation.py b/clients/python/test/test_stage_range_creation.py deleted file mode 100644 index f75cbff5acd..00000000000 --- a/clients/python/test/test_stage_range_creation.py +++ /dev/null @@ -1,36 +0,0 @@ -""" - lakeFS API - - lakeFS HTTP API # noqa: E501 - - The version of 
the OpenAPI document: 0.1.0 - Contact: services@treeverse.io - Generated by: https://openapi-generator.tech -""" - - -import sys -import unittest - -import lakefs_client -from lakefs_client.model.stage_range_creation import StageRangeCreation - - -class TestStageRangeCreation(unittest.TestCase): - """StageRangeCreation unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testStageRangeCreation(self): - """Test StageRangeCreation""" - # FIXME: construct object with mandatory attributes with example values - # model = StageRangeCreation() # noqa: E501 - pass - - -if __name__ == '__main__': - unittest.main() diff --git a/docs/assets/js/swagger.yml b/docs/assets/js/swagger.yml index 4df6c7fd5f9..3b9ebba8be6 100644 --- a/docs/assets/js/swagger.yml +++ b/docs/assets/js/swagger.yml @@ -5,8 +5,8 @@ info: title: lakeFS API license: name: "Apache 2.0" - url: "https://www.apache.org/licenses/LICENSE-2.0.html" - version: "0.1.0" + url: https://www.apache.org/licenses/LICENSE-2.0.html + version: 0.1.0 servers: - url: "/api/v1" @@ -151,25 +151,6 @@ components: minimum: 0 description: Maximal number of entries per page - ImportPagination: - type: object - required: - - has_more - - last_key - properties: - has_more: - type: boolean - description: More keys to be ingested. - continuation_token: - type: string - description: Opaque. Token used to import the next range. - last_key: - type: string - description: Last object store key that was ingested. - staging_token: - type: string - description: Staging token for skipped objects during ingest - Repository: type: object required: @@ -290,9 +271,9 @@ components: type: integer format: int64 description: | - If present and nonzero, physical_address is a presigned URL and + If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than - the presigned URL lifetime if an authentication token is about + the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. @@ -1132,14 +1113,14 @@ components: presigned_url: type: string nullable: true - description: if presign=true is passed in the request, this field will contain a presigned URL to use when uploading + description: if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading presigned_url_expiry: type: integer format: int64 description: | - If present and nonzero, physical_address is a presigned URL and + If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than - the presigned URL lifetime if an authentication token is about + the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. @@ -1267,7 +1248,7 @@ components: description: Path type, can either be 'common_prefix' or 'object' path: type: string - description: A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. + description: A source location to import path or to a single object. Must match the lakeFS installation blockstore type. example: s3://my-bucket/production/collections/ destination: type: string @@ -1295,32 +1276,6 @@ components: destination: collections/file1 type: object - StageRangeCreation: - type: object - required: - - fromSourceURI - - after - - prepend - properties: - fromSourceURI: - type: string - description: The source location of the ingested files. 
Must match the lakeFS installation blockstore type. - example: s3://my-bucket/production/collections/ - after: - type: string - description: Only objects after this key would be ingested. - example: production/collections/some/file.parquet - prepend: - type: string - description: A prefix to prepend to ingested objects. - example: collections/ - continuation_token: - type: string - description: Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key. - staging_token: - type: string - description: Opaque. Client should pass staging_token if received from server on previous request - RangeMetadata: type: object required: @@ -1349,15 +1304,7 @@ components: type: integer description: Estimated size of the range in bytes - IngestRangeCreationResponse: - type: object - properties: - range: - $ref: "#/components/schemas/RangeMetadata" - pagination: - $ref: "#/components/schemas/ImportPagination" - - ImportStatusResp: + ImportStatus: type: object properties: completed: @@ -1889,7 +1836,7 @@ paths: - $ref: "#/components/parameters/PaginationAmount" responses: 200: - description: group memeber list + description: group member list content: application/json: schema: @@ -3410,7 +3357,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/ImportStatusResp" + $ref: "#/components/schemas/ImportStatus" 401: $ref: "#/components/responses/Unauthorized" 404: @@ -3469,77 +3416,6 @@ paths: default: $ref: "#/components/responses/ServerError" - /repositories/{repository}/branches/metaranges: - parameters: - - in: path - name: repository - required: true - schema: - type: string - post: - tags: - - import - operationId: createMetaRange - summary: create a lakeFS metarange file from the given ranges - requestBody: - required: true - content: - application/json: - schema: - $ref: "#/components/schemas/MetaRangeCreation" - responses: - 201: - description: metarange metadata - content: - application/json: - schema: - $ref: "#/components/schemas/MetaRangeCreationResponse" - 400: - $ref: "#/components/responses/ValidationError" - 401: - $ref: "#/components/responses/Unauthorized" - 403: - $ref: "#/components/responses/Forbidden" - 404: - $ref: "#/components/responses/NotFound" - default: - $ref: "#/components/responses/ServerError" - - /repositories/{repository}/branches/ranges: - parameters: - - in: path - name: repository - required: true - schema: - type: string - post: - tags: - - import - operationId: ingestRange - summary: create a lakeFS range file from the source uri - requestBody: - required: true - content: - application/json: - schema: - $ref: "#/components/schemas/StageRangeCreation" - responses: - 201: - description: range metadata - content: - application/json: - schema: - $ref: "#/components/schemas/IngestRangeCreationResponse" - - 400: - $ref: "#/components/responses/ValidationError" - 401: - $ref: "#/components/responses/Unauthorized" - 404: - $ref: "#/components/responses/NotFound" - default: - $ref: "#/components/responses/ServerError" - /repositories/{repository}/branches/{branch}/objects/stage_allowed: parameters: - in: path diff --git a/esti/copy_test.go b/esti/copy_test.go index d1aeae1614b..ab73c1af5f5 100644 --- a/esti/copy_test.go +++ b/esti/copy_test.go @@ -31,7 +31,12 @@ func TestCopyObject(t *testing.T) { t.Run("copy_large_size_file", func(t *testing.T) { importPath := getImportPath(t) - importTestData(t, ctx, client, repo, importPath) + + testImportNew(t, ctx, repo, ingestionBranch, + 
[]apigen.ImportLocation{{Path: importPath, Type: "common_prefix"}}, + map[string]string{"created_by": "import"}, + ) + res, err := client.StatObjectWithResponse(ctx, repo, ingestionBranch, &apigen.StatObjectParams{ Path: largeObject, }) @@ -50,7 +55,7 @@ func TestCopyObject(t *testing.T) { require.NoError(t, err, "failed to copy") require.NotNil(t, copyResp.JSON201) - // Verify creation path, date and physical address are different + // Verify the creation path, date and physical address are different copyStat := copyResp.JSON201 require.NotEqual(t, objStat.PhysicalAddress, copyStat.PhysicalAddress) require.GreaterOrEqual(t, copyStat.Mtime, objStat.Mtime) @@ -73,7 +78,12 @@ func TestCopyObject(t *testing.T) { t.Run("copy_large_size_file_abort", func(t *testing.T) { requireBlockstoreType(t, block.BlockstoreTypeAzure) importPath := strings.Replace(azureCopyDataPath, "esti", azureAbortAccount, 1) - importTestData(t, ctx, client, repo, importPath) + + testImportNew(t, ctx, repo, ingestionBranch, + []apigen.ImportLocation{{Path: importPath, Type: "common_prefix"}}, + map[string]string{"created_by": "import"}, + ) + res, err := client.StatObjectWithResponse(ctx, repo, ingestionBranch, &apigen.StatObjectParams{ Path: largeObject, }) @@ -130,52 +140,3 @@ func getImportPath(t *testing.T) string { } return importPath } - -func importTestData(t *testing.T, ctx context.Context, client apigen.ClientWithResponsesInterface, repoName, importPath string) { - var ( - after = "" - token *string - ranges []apigen.RangeMetadata - ) - for { - resp, err := client.IngestRangeWithResponse(ctx, repoName, apigen.IngestRangeJSONRequestBody{ - After: after, - ContinuationToken: token, - FromSourceURI: importPath, - }) - require.NoError(t, err, "failed to ingest range") - require.Equal(t, http.StatusCreated, resp.StatusCode()) - require.NotNil(t, resp.JSON201) - ranges = append(ranges, *resp.JSON201.Range) - if !resp.JSON201.Pagination.HasMore { - break - } - after = resp.JSON201.Pagination.LastKey - token = resp.JSON201.Pagination.ContinuationToken - } - - metarangeResp, err := client.CreateMetaRangeWithResponse(ctx, repoName, apigen.CreateMetaRangeJSONRequestBody{ - Ranges: ranges, - }) - - require.NoError(t, err, "failed to create metarange") - require.NotNil(t, metarangeResp.JSON201) - require.NotNil(t, metarangeResp.JSON201.Id) - - _, err = client.CreateBranchWithResponse(ctx, repoName, apigen.CreateBranchJSONRequestBody{ - Name: ingestionBranch, - Source: "main", - }) - require.NoError(t, err, "failed to create branch") - - commitResp, err := client.CommitWithResponse(ctx, repoName, ingestionBranch, &apigen.CommitParams{ - SourceMetarange: metarangeResp.JSON201.Id, - }, apigen.CommitJSONRequestBody{ - Message: "created by import", - Metadata: &apigen.CommitCreation_Metadata{ - AdditionalProperties: map[string]string{"created_by": "import"}, - }, - }) - require.NoError(t, err, "failed to commit") - require.NotNil(t, commitResp.JSON201) -} diff --git a/esti/import_test.go b/esti/import_test.go index dac9b3b2ead..a030c0c00c4 100644 --- a/esti/import_test.go +++ b/esti/import_test.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "net/http" - "net/url" "os" "path/filepath" "strconv" @@ -12,11 +11,9 @@ import ( "testing" "time" - "github.com/rs/xid" "github.com/spf13/viper" "github.com/stretchr/testify/require" "github.com/treeverse/lakefs/pkg/api/apigen" - "github.com/treeverse/lakefs/pkg/api/apiutil" "github.com/treeverse/lakefs/pkg/block" "github.com/treeverse/lakefs/pkg/catalog" "github.com/treeverse/lakefs/pkg/config" 
@@ -31,7 +28,6 @@ const ( azureImportPath = "https://esti.blob.core.windows.net/esti-system-testing-data/import-test-data/" importTargetPrefix = "imported/new-prefix/" importBranchBase = "ingestion" - adlsTestImportPath = "import-test-cases" ) var importFilesToCheck = []string{ @@ -73,29 +69,6 @@ func setupImportByBlockstoreType(t testing.TB) (string, string, int) { return blockstoreType, importPath, expectedContentLength } -func TestImport(t *testing.T) { - ctx, _, repoName := setupTest(t) - defer tearDownTest(repoName) - blockstoreType, importPath, expectedContentLength := setupImportByBlockstoreType(t) - - t.Run("default", func(t *testing.T) { - importBranch := fmt.Sprintf("%s-%s", importBranchBase, "default") - testImport(t, ctx, repoName, importPath, importBranch) - verifyImportObjects(t, ctx, repoName, importTargetPrefix, importBranch, importFilesToCheck, expectedContentLength) - }) - - t.Run("parent", func(t *testing.T) { - importBranch := fmt.Sprintf("%s-%s", importBranchBase, "parent") - if blockstoreType == block.BlockstoreTypeLocal { - t.Skip("local always assumes import path is dir") - } - // import without the directory separator as suffix to include the parent directory - importPathParent := strings.TrimSuffix(importPath, "/") - testImport(t, ctx, repoName, importPathParent, importBranch) - verifyImportObjects(t, ctx, repoName, importTargetPrefix+"import-test-data/", importBranch, importFilesToCheck, expectedContentLength) - }) -} - func setupLocalImportPath(t testing.TB) string { const dirPerm = 0o755 importDir := filepath.Join(t.TempDir(), "import-test-data") + "/" @@ -163,138 +136,7 @@ func verifyImportObjects(t testing.TB, ctx context.Context, repoName, prefix, im t.Log("Total objects imported:", count) } -func ingestRange(t testing.TB, ctx context.Context, repoName, importPath string) ([]apigen.RangeMetadata, string) { - var ( - after string - token *string - ranges []apigen.RangeMetadata - stagingToken string - ) - for { - resp, err := client.IngestRangeWithResponse(ctx, repoName, apigen.IngestRangeJSONRequestBody{ - After: after, - ContinuationToken: token, - FromSourceURI: importPath, - Prepend: importTargetPrefix, - }) - require.NoError(t, err, "failed to ingest range") - require.Equal(t, http.StatusCreated, resp.StatusCode()) - require.NotNil(t, resp.JSON201) - ranges = append(ranges, *resp.JSON201.Range) - stagingToken = apiutil.Value(resp.JSON201.Pagination.StagingToken) - if !resp.JSON201.Pagination.HasMore { - break - } - after = resp.JSON201.Pagination.LastKey - token = resp.JSON201.Pagination.ContinuationToken - } - return ranges, stagingToken -} - -func testImport(t testing.TB, ctx context.Context, repoName, importPath, importBranch string) { - ranges, stagingToken := ingestRange(t, ctx, repoName, importPath) - - metarangeResp, err := client.CreateMetaRangeWithResponse(ctx, repoName, apigen.CreateMetaRangeJSONRequestBody{ - Ranges: ranges, - }) - - require.NoError(t, err, "failed to create metarange") - require.Equal(t, http.StatusCreated, metarangeResp.StatusCode()) - require.NotNil(t, metarangeResp.JSON201.Id, "failed to create metarange") - - createResp, err := client.CreateBranchWithResponse(ctx, repoName, apigen.CreateBranchJSONRequestBody{ - Name: importBranch, - Source: "main", - }) - require.NoError(t, err, "failed to create branch", importBranch) - require.Equal(t, http.StatusCreated, createResp.StatusCode(), "failed to create branch", importBranch) - - commitResp, err := client.CommitWithResponse(ctx, repoName, importBranch, &apigen.CommitParams{ - 
SourceMetarange: metarangeResp.JSON201.Id, - }, apigen.CommitJSONRequestBody{ - Message: "created by import", - Metadata: &apigen.CommitCreation_Metadata{ - AdditionalProperties: map[string]string{"created_by": "import"}, - }, - }) - require.NoError(t, err, "failed to commit") - require.Equal(t, http.StatusCreated, commitResp.StatusCode(), "failed to commit") - - if stagingToken != "" { - commitResp, err = client.CommitWithResponse(ctx, repoName, importBranch, &apigen.CommitParams{}, apigen.CommitJSONRequestBody{ - Message: "created by import on skipped objects", - Metadata: &apigen.CommitCreation_Metadata{ - AdditionalProperties: map[string]string{"created_by": "import"}, - }, - }) - require.NoError(t, err, "failed to commit") - require.Equal(t, http.StatusCreated, commitResp.StatusCode(), "failed to commit") - } -} - -func TestAzureDataLakeV2(t *testing.T) { - importPrefix := viper.GetString("adls_import_base_url") - if importPrefix == "" { - t.Skip("No Azure data lake storage path prefix was given") - } - - ctx, _, repoName := setupTest(t) - defer tearDownTest(repoName) - - tests := []struct { - name string - prefix string - filesToCheck []string - }{ - { - name: "import-test-data", - prefix: "", - filesToCheck: importFilesToCheck, - }, - { - name: "empty-folders", - prefix: adlsTestImportPath, - filesToCheck: []string{}, - }, - { - name: "prefix-item-order", - prefix: adlsTestImportPath, - filesToCheck: []string{ - "aaa", - "helloworld.csv", - "zero", - "helloworld/myfile.csv", - }, - }, - //{ // Use this configuration to run import on big dataset of ~620,000 objects - // name: "adls-big-import", - // prefix: "", - // filesToCheck: []string{}, - //}, - } - - for _, tt := range tests { - importBranch := fmt.Sprintf("%s-%s", importBranchBase, tt.name) - // each test is a folder under the prefix import - t.Run(tt.name, func(t *testing.T) { - importPath, err := url.JoinPath(importPrefix, tt.prefix, tt.name) - if err != nil { - t.Fatal("Import URL", err) - } - testImport(t, ctx, repoName, importPath, importBranch) - if len(tt.filesToCheck) == 0 { - resp, err := client.ListObjectsWithResponse(ctx, repoName, importBranch, &apigen.ListObjectsParams{}) - require.NoError(t, err) - require.NotNil(t, resp.JSON200) - require.Empty(t, resp.JSON200.Results) - } else { - verifyImportObjects(t, ctx, repoName, filepath.Join(importTargetPrefix, tt.name)+"/", importBranch, tt.filesToCheck, 0) - } - }) - } -} - -func TestImportNew(t *testing.T) { +func TestImport(t *testing.T) { blockstoreType, importPath, expectedContentLength := setupImportByBlockstoreType(t) metadata := map[string]string{"created_by": "import"} @@ -350,7 +192,7 @@ func TestImportNew(t *testing.T) { Path: importPathParent, Type: catalog.ImportPathTypePrefix, }} - _ = testImportNew(t, ctx, repoName, branch, paths, &metadata) + _ = testImportNew(t, ctx, repoName, branch, paths, metadata) verifyImportObjects(t, ctx, repoName, importTargetPrefix+"import-test-data/", branch, importFilesToCheck, expectedContentLength) }) @@ -373,7 +215,7 @@ func TestImportNew(t *testing.T) { Type: catalog.ImportPathTypePrefix, }) - _ = testImportNew(t, ctx, repoName, branch, paths, &metadata) + _ = testImportNew(t, ctx, repoName, branch, paths, metadata) verifyImportObjects(t, ctx, repoName, importTargetPrefix, branch, importFilesToCheck, expectedContentLength) }) @@ -404,7 +246,7 @@ func TestImportNew(t *testing.T) { }) } -func testImportNew(t testing.TB, ctx context.Context, repoName, importBranch string, paths []apigen.ImportLocation, metadata 
*map[string]string) string { +func testImportNew(t testing.TB, ctx context.Context, repoName, importBranch string, paths []apigen.ImportLocation, metadata map[string]string) string { createResp, err := client.CreateBranchWithResponse(ctx, repoName, apigen.CreateBranchJSONRequestBody{ Name: importBranch, Source: "main", @@ -418,8 +260,8 @@ func testImportNew(t testing.TB, ctx context.Context, repoName, importBranch str }, Paths: paths, } - if metadata != nil { - body.Commit.Metadata = &apigen.CommitCreation_Metadata{AdditionalProperties: *metadata} + if len(metadata) > 0 { + body.Commit.Metadata = &apigen.CommitCreation_Metadata{AdditionalProperties: metadata} } importResp, err := client.ImportStartWithResponse(ctx, repoName, importBranch, body) @@ -431,29 +273,26 @@ func testImportNew(t testing.TB, ctx context.Context, repoName, importBranch str updateTime time.Time ) importID := importResp.JSON202.Id - ticker := time.NewTicker(5 * time.Second) + + ticker := time.NewTicker(2 * time.Second) defer ticker.Stop() - for { - select { - case <-ctx.Done(): - t.Fatalf("context canceled") - case <-ticker.C: - statusResp, err = client.ImportStatusWithResponse(ctx, repoName, importBranch, &apigen.ImportStatusParams{ - Id: importID, - }) - require.NoError(t, err) - require.NotNil(t, statusResp.JSON200, "failed to get import status", err) - status := statusResp.JSON200 - require.Nil(t, status.Error) - require.NotEqual(t, updateTime, status.UpdateTime) - updateTime = status.UpdateTime - t.Log("Import progress:", *status.IngestedObjects, importID) - } - if statusResp.JSON200.Completed { - t.Log("Import completed:", importID) - return importID + for range ticker.C { + statusResp, err = client.ImportStatusWithResponse(ctx, repoName, importBranch, &apigen.ImportStatusParams{ + Id: importID, + }) + require.NoError(t, err, "failed to get import status", importID) + require.NotNil(t, statusResp.JSON200, "failed to get import status", err) + status := statusResp.JSON200 + require.Nil(t, status.Error, "import failed", err) + require.NotEqual(t, updateTime, status.UpdateTime) + updateTime = status.UpdateTime + t.Log("Import progress:", *status.IngestedObjects, importID) + if status.Completed { + break } } + t.Log("Import completed:", importID) + return importID } func TestImportCancel(t *testing.T) { @@ -510,68 +349,3 @@ func TestImportCancel(t *testing.T) { timer.Reset(3 * time.Second) // Server updates status every 1 second - unless operation was canceled successfully } } - -// ##################################################################################################################### -// # # -// # BENCHMARKS # -// # # -// ##################################################################################################################### -func BenchmarkIngest_Azure(b *testing.B) { - requireBlockstoreType(b, block.BlockstoreTypeAzure) - ctx, _, repoName := setupTest(b) - defer tearDownTest(repoName) - - b.Run("alds_gen2_ingest", func(b *testing.B) { - importPrefix := viper.GetString("adls_import_base_url") - if importPrefix == "" { - b.Skip("No Azure data lake storage path prefix was given") - } - importPath, err := url.JoinPath(importPrefix, "import-test-data/") - if err != nil { - b.Fatal("Import URL", err) - } - benchmarkIngest(b, ctx, repoName, importPath) - }) - - b.Run("blob_storage_ingest", func(b *testing.B) { - benchmarkIngest(b, ctx, repoName, azureImportPath) - }) -} - -func benchmarkIngest(b *testing.B, ctx context.Context, repoName, importPath string) { - b.ResetTimer() - for n := 0; n < 
b.N; n++ { - ingestRange(b, ctx, repoName, importPath) - } -} - -func BenchmarkImport_Azure(b *testing.B) { - requireBlockstoreType(b, block.BlockstoreTypeAzure) - ctx, _, repoName := setupTest(b) - defer tearDownTest(repoName) - - b.Run("alds_gen2_import", func(b *testing.B) { - importPrefix := viper.GetString("adls_import_base_url") - if importPrefix == "" { - b.Skip("No Azure data lake storage path prefix was given") - } - importBranch := fmt.Sprintf("%s-%s", importBranchBase, makeRepositoryName(b.Name())) - importPath, err := url.JoinPath(importPrefix, "import-test-data/") - if err != nil { - b.Fatal("Import URL", err) - } - benchmarkImport(b, ctx, repoName, importPath, importBranch) - }) - - b.Run("blob_storage_import", func(b *testing.B) { - importBranch := fmt.Sprintf("%s-%s", importBranchBase, makeRepositoryName(b.Name())) - benchmarkImport(b, ctx, repoName, azureImportPath, importBranch) - }) -} - -func benchmarkImport(b *testing.B, ctx context.Context, repoName, importPath, importBranch string) { - b.ResetTimer() - for n := 0; n < b.N; n++ { - testImport(b, ctx, repoName, importPath, fmt.Sprintf("%s-%s", importBranch, xid.New().String())) - } -} diff --git a/pkg/api/controller.go b/pkg/api/controller.go index 98ce39fd24d..d9f995a2f12 100644 --- a/pkg/api/controller.go +++ b/pkg/api/controller.go @@ -2240,8 +2240,8 @@ func (c *Controller) ImportStart(w http.ResponseWriter, r *http.Request, body ap }) } -func importStatusToResponse(status *graveler.ImportStatus) apigen.ImportStatusResp { - resp := apigen.ImportStatusResp{ +func importStatusToResponse(status *graveler.ImportStatus) apigen.ImportStatus { + resp := apigen.ImportStatus{ Completed: status.Completed, IngestedObjects: &status.Progress, UpdateTime: status.UpdatedAt, @@ -2309,92 +2309,6 @@ func (c *Controller) ImportCancel(w http.ResponseWriter, r *http.Request, reposi writeResponse(w, r, http.StatusNoContent, nil) } -func (c *Controller) IngestRange(w http.ResponseWriter, r *http.Request, body apigen.IngestRangeJSONRequestBody, repository string) { - if !c.authorize(w, r, permissions.Node{ - Type: permissions.NodeTypeAnd, - Nodes: []permissions.Node{ - { - Permission: permissions.Permission{ - Action: permissions.ImportFromStorageAction, - Resource: permissions.StorageNamespace(body.FromSourceURI), - }, - }, - { - Permission: permissions.Permission{ - Action: permissions.WriteObjectAction, - Resource: permissions.ObjectArn(repository, body.Prepend), - }, - }, - }, - }) { - return - } - - ctx := r.Context() - c.LogAction(ctx, "ingest_range", r, repository, "", "") - - contToken := swag.StringValue(body.ContinuationToken) - stagingToken := swag.StringValue(body.StagingToken) - info, mark, err := c.Catalog.WriteRange(r.Context(), repository, catalog.WriteRangeRequest{ - SourceURI: body.FromSourceURI, - Prepend: body.Prepend, - After: body.After, - StagingToken: stagingToken, - ContinuationToken: contToken, - }) - if c.handleAPIError(ctx, w, r, err) { - return - } - - writeResponse(w, r, http.StatusCreated, apigen.IngestRangeCreationResponse{ - Range: &apigen.RangeMetadata{ - Id: string(info.ID), - MinKey: string(info.MinKey), - MaxKey: string(info.MaxKey), - Count: info.Count, - EstimatedSize: int(info.EstimatedRangeSizeBytes), - }, - Pagination: &apigen.ImportPagination{ - HasMore: mark.HasMore, - ContinuationToken: &mark.ContinuationToken, - LastKey: mark.LastKey, - StagingToken: &mark.StagingToken, - }, - }) -} - -func (c *Controller) CreateMetaRange(w http.ResponseWriter, r *http.Request, body 
apigen.CreateMetaRangeJSONRequestBody, repository string) { - if !c.authorize(w, r, permissions.Node{ - Permission: permissions.Permission{ - Action: permissions.CreateMetaRangeAction, - Resource: permissions.RepoArn(repository), - }, - }) { - return - } - - ctx := r.Context() - c.LogAction(ctx, "create_metarange", r, repository, "", "") - - ranges := make([]*graveler.RangeInfo, 0, len(body.Ranges)) - for _, r := range body.Ranges { - ranges = append(ranges, &graveler.RangeInfo{ - ID: graveler.RangeID(r.Id), - MinKey: graveler.Key(r.MinKey), - MaxKey: graveler.Key(r.MaxKey), - Count: r.Count, - EstimatedRangeSizeBytes: uint64(r.EstimatedSize), - }) - } - info, err := c.Catalog.WriteMetaRange(r.Context(), repository, ranges) - if c.handleAPIError(ctx, w, r, err) { - return - } - writeResponse(w, r, http.StatusCreated, apigen.MetaRangeCreationResponse{ - Id: swag.String(string(info.ID)), - }) -} - func (c *Controller) Commit(w http.ResponseWriter, r *http.Request, body apigen.CommitJSONRequestBody, repository, branch string, params apigen.CommitParams) { if !c.authorize(w, r, permissions.Node{ Permission: permissions.Permission{ diff --git a/pkg/api/controller_test.go b/pkg/api/controller_test.go index af787ff2bd1..5800c94d04c 100644 --- a/pkg/api/controller_test.go +++ b/pkg/api/controller_test.go @@ -35,7 +35,6 @@ import ( "github.com/treeverse/lakefs/pkg/auth" "github.com/treeverse/lakefs/pkg/block" "github.com/treeverse/lakefs/pkg/catalog" - "github.com/treeverse/lakefs/pkg/catalog/testutils" "github.com/treeverse/lakefs/pkg/config" "github.com/treeverse/lakefs/pkg/graveler" "github.com/treeverse/lakefs/pkg/httputil" @@ -147,8 +146,8 @@ func TestController_ListRepositoriesHandler(t *testing.T) { t.Run("paginate repos after", func(t *testing.T) { // write some repos resp, err := clt.ListRepositoriesWithResponse(ctx, &apigen.ListRepositoriesParams{ - After: apiutil.Ptr(apigen.PaginationAfter("foo2")), - Amount: apiutil.Ptr(apigen.PaginationAmount(2)), + After: apiutil.Ptr[apigen.PaginationAfter]("foo2"), + Amount: apiutil.Ptr[apigen.PaginationAmount](2), }) if err != nil { t.Fatal(err) @@ -1194,7 +1193,7 @@ func TestController_ListBranchesHandler(t *testing.T) { _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, "foo2"), "main") testutil.Must(t, err) - // create first dummy commit on main so that we can create branches from it + // create the first "dummy" commit on main so that we can create branches from it testutil.Must(t, deps.catalog.CreateEntry(ctx, repo, "main", catalog.DBEntry{Path: "a/b"})) _, err = deps.catalog.Commit(ctx, repo, "main", "first commit", "test", nil, nil, nil) testutil.Must(t, err) @@ -1213,8 +1212,8 @@ func TestController_ListBranchesHandler(t *testing.T) { } resp, err = clt.ListBranchesWithResponse(ctx, repo, &apigen.ListBranchesParams{ - After: apiutil.Ptr(apigen.PaginationAfter("main1")), - Amount: apiutil.Ptr(apigen.PaginationAmount(2)), + After: apiutil.Ptr[apigen.PaginationAfter]("main1"), + Amount: apiutil.Ptr[apigen.PaginationAmount](2), }) verifyResponseOK(t, resp, err) results := resp.JSON200.Results @@ -1334,7 +1333,7 @@ func TestController_GetBranchHandler(t *testing.T) { testutil.Must(t, err) t.Run("get default branch", func(t *testing.T) { - // create first dummy commit on main so that we can create branches from it + // create the first "dummy" commit on main so that we can create branches from it testutil.Must(t, deps.catalog.CreateEntry(ctx, repo, testBranch, catalog.DBEntry{Path: "a/b"})) _, err = deps.catalog.Commit(ctx, repo, 
testBranch, "first commit", "test", nil, nil, nil) testutil.Must(t, err) @@ -1819,194 +1818,6 @@ func TestController_DeleteBranchHandler(t *testing.T) { }) } -func TestController_IngestRangeHandler(t *testing.T) { - const ( - fromSourceURI = "https://valid.uri" - uriPrefix = "take/from/here" - fromSourceURIWithPrefix = fromSourceURI + "/" + uriPrefix - after = "some/key/to/start/after" - prepend = "some/logical/prefix" - ) - - const continuationToken = "opaque" - - t.Run("ingest directory marker", func(t *testing.T) { - ctx := context.Background() - w := testutils.NewFakeWalker(0, 1, uriPrefix, after, continuationToken, fromSourceURIWithPrefix, nil) - w.Entries = []block.ObjectStoreEntry{ - { - RelativeKey: "", - FullKey: uriPrefix + "/", - Address: fromSourceURIWithPrefix + "/", - ETag: "dir_etag", - Size: 0, - }, - } - clt, deps := setupClientWithAdminAndWalkerFactory(t, testutils.FakeFactory{Walker: w}) - _, err := deps.catalog.CreateRepository(ctx, "repo-dir-marker", onBlock(deps, "foo2"), "main") - testutil.Must(t, err) - - resp, err := clt.IngestRangeWithResponse(ctx, "repo-dir-marker", apigen.IngestRangeJSONRequestBody{ - FromSourceURI: fromSourceURIWithPrefix, - ContinuationToken: swag.String(continuationToken), - After: after, - }) - verifyResponseOK(t, resp, err) - require.NotNil(t, resp.JSON201.Range) - require.NotNil(t, resp.JSON201.Pagination) - require.Equal(t, 1, resp.JSON201.Range.Count) - require.Equal(t, resp.JSON201.Range.MinKey, "") - require.Equal(t, resp.JSON201.Range.MaxKey, "") - require.False(t, resp.JSON201.Pagination.HasMore) - require.Empty(t, resp.JSON201.Pagination.LastKey) - require.Empty(t, resp.JSON201.Pagination.ContinuationToken) - }) - - t.Run("successful ingestion no pagination", func(t *testing.T) { - ctx := context.Background() - repo := testUniqueRepoName() - count := 1000 - clt, w := func(t *testing.T, count int, expectedErr error) (apigen.ClientWithResponsesInterface, *testutils.FakeWalker) { - t.Helper() - ctx := context.Background() - - w := testutils.NewFakeWalker(count, count, uriPrefix, after, continuationToken, fromSourceURIWithPrefix, expectedErr) - clt, deps := setupClientWithAdminAndWalkerFactory(t, testutils.FakeFactory{Walker: w}) - - // setup test data - _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, "foo1"), "main") - testutil.Must(t, err) - - return clt, w - }(t, count, nil) - - resp, err := clt.IngestRangeWithResponse(ctx, repo, apigen.IngestRangeJSONRequestBody{ - After: after, - FromSourceURI: fromSourceURIWithPrefix, - Prepend: prepend, - ContinuationToken: swag.String(continuationToken), - }) - - verifyResponseOK(t, resp, err) - require.NotNil(t, resp.JSON201.Range) - require.NotNil(t, resp.JSON201.Pagination) - require.Equal(t, count, resp.JSON201.Range.Count) - require.Equal(t, strings.Replace(w.Entries[0].FullKey, uriPrefix, prepend, 1), resp.JSON201.Range.MinKey) - require.Equal(t, strings.Replace(w.Entries[count-1].FullKey, uriPrefix, prepend, 1), resp.JSON201.Range.MaxKey) - require.False(t, resp.JSON201.Pagination.HasMore) - require.Empty(t, resp.JSON201.Pagination.LastKey) - require.Empty(t, resp.JSON201.Pagination.ContinuationToken) - }) - - t.Run("successful ingestion with pagination", func(t *testing.T) { - // force splitting the range before - ctx := context.Background() - repo := testUniqueRepoName() - count := 200_000 - clt, w := func(t *testing.T, count int, expectedErr error) (apigen.ClientWithResponsesInterface, *testutils.FakeWalker) { - t.Helper() - ctx := context.Background() - - w := 
testutils.NewFakeWalker(count, count, uriPrefix, after, continuationToken, fromSourceURIWithPrefix, expectedErr) - clt, deps := setupClientWithAdminAndWalkerFactory(t, testutils.FakeFactory{Walker: w}) - - // setup test data - _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, "foo1"), "main") - testutil.Must(t, err) - - return clt, w - }(t, count, nil) - - resp, err := clt.IngestRangeWithResponse(ctx, repo, apigen.IngestRangeJSONRequestBody{ - After: after, - FromSourceURI: fromSourceURIWithPrefix, - Prepend: prepend, - ContinuationToken: swag.String(continuationToken), - }) - - verifyResponseOK(t, resp, err) - require.NotNil(t, resp.JSON201.Range) - require.NotNil(t, resp.JSON201.Pagination) - require.Less(t, resp.JSON201.Range.Count, count) - require.Equal(t, strings.Replace(w.Entries[0].FullKey, uriPrefix, prepend, 1), resp.JSON201.Range.MinKey) - require.Equal(t, strings.Replace(w.Entries[resp.JSON201.Range.Count-1].FullKey, uriPrefix, prepend, 1), resp.JSON201.Range.MaxKey) - require.True(t, resp.JSON201.Pagination.HasMore) - require.Equal(t, w.Entries[resp.JSON201.Range.Count-1].FullKey, resp.JSON201.Pagination.LastKey) - require.Equal(t, testutils.ContinuationTokenOpaque, *resp.JSON201.Pagination.ContinuationToken) - }) - - t.Run("error during walk", func(t *testing.T) { - // force splitting the range before - ctx := context.Background() - repo := testUniqueRepoName() - count := 10 - expectedErr := errors.New("failed reading for object store") - clt, _ := func(t *testing.T, count int, expectedErr error) (apigen.ClientWithResponsesInterface, *testutils.FakeWalker) { - t.Helper() - ctx := context.Background() - - w := testutils.NewFakeWalker(count, count, uriPrefix, after, continuationToken, fromSourceURIWithPrefix, expectedErr) - clt, deps := setupClientWithAdminAndWalkerFactory(t, testutils.FakeFactory{Walker: w}) - - // setup test data - _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, "foo1"), "main") - testutil.Must(t, err) - - return clt, w - }(t, count, expectedErr) - - resp, err := clt.IngestRangeWithResponse(ctx, repo, apigen.IngestRangeJSONRequestBody{ - After: after, - FromSourceURI: fromSourceURIWithPrefix, - Prepend: prepend, - ContinuationToken: swag.String(continuationToken), - }) - - require.NoError(t, err) - require.Equal(t, http.StatusInternalServerError, resp.StatusCode()) - require.Contains(t, string(resp.Body), expectedErr.Error()) - }) -} - -func TestController_WriteMetaRangeHandler(t *testing.T) { - ctx := context.Background() - clt, deps := setupClientWithAdmin(t) - repo := testUniqueRepoName() - // setup test data - _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, repo), "main") - testutil.Must(t, err) - - t.Run("successful metarange creation", func(t *testing.T) { - resp, err := clt.CreateMetaRangeWithResponse(ctx, repo, apigen.CreateMetaRangeJSONRequestBody{ - Ranges: []apigen.RangeMetadata{ - {Count: 11355, EstimatedSize: 123465897, Id: "FirstRangeID", MaxKey: "1", MinKey: "2"}, - {Count: 13123, EstimatedSize: 123465897, Id: "SecondRangeID", MaxKey: "3", MinKey: "4"}, - {Count: 10123, EstimatedSize: 123465897, Id: "ThirdRangeID", MaxKey: "5", MinKey: "6"}, - }, - }) - - verifyResponseOK(t, resp, err) - require.NotNil(t, resp.JSON201) - require.NotNil(t, resp.JSON201.Id) - require.NotEmpty(t, *resp.JSON201.Id) - - respMR, err := clt.GetMetaRangeWithResponse(ctx, repo, *resp.JSON201.Id) - verifyResponseOK(t, respMR, err) - require.NotNil(t, respMR.JSON200) - require.NotEmpty(t, respMR.JSON200.Location) - }) - - 
t.Run("missing ranges", func(t *testing.T) { - resp, err := clt.CreateMetaRangeWithResponse(ctx, repo, apigen.CreateMetaRangeJSONRequestBody{ - Ranges: []apigen.RangeMetadata{}, - }) - - require.NoError(t, err) - require.NotNil(t, resp.JSON400) - require.Equal(t, http.StatusBadRequest, resp.StatusCode()) - }) -} - func TestController_ObjectsStatObjectHandler(t *testing.T) { clt, deps := setupClientWithAdmin(t) ctx := context.Background() @@ -3466,7 +3277,7 @@ func TestController_Revert(t *testing.T) { }) t.Run("dirty_branch", func(t *testing.T) { - // create branch with entry without commit + // create branch with entry without a commit createBranch, err := deps.catalog.CreateBranch(ctx, repo, "dirty", "main") testutil.Must(t, err) err = deps.catalog.CreateEntry(ctx, repo, "dirty", catalog.DBEntry{Path: "foo/bar2", PhysicalAddress: "bar2addr", CreationDate: time.Now(), Size: 1, Checksum: "cksum2"}) @@ -3621,7 +3432,7 @@ func TestController_CherryPick(t *testing.T) { }) t.Run("dirty branch", func(t *testing.T) { - // create branch with entry without commit + // create branch with entry without a commit _, err := deps.catalog.CreateBranch(ctx, repo, "dirty", "main") testutil.Must(t, err) err = deps.catalog.CreateEntry(ctx, repo, "dirty", catalog.DBEntry{Path: "foo/bar5", PhysicalAddress: "bar50addr", CreationDate: time.Now(), Size: 5, Checksum: "cksum5"}) @@ -3991,7 +3802,7 @@ func TestController_ClientDisconnect(t *testing.T) { t.Fatal("Expected to request complete without error, expected to fail") } - // wait for server to identify we left and update the counter + // wait for the server to identify we left and update the counter time.Sleep(time.Second) // request for metrics @@ -4270,7 +4081,7 @@ func TestController_CopyObjectHandler(t *testing.T) { }) verifyResponseOK(t, copyResp, err) - // Verify creation path, date and physical address are different + // Verify the creation path, date and physical address are different copyStat := copyResp.JSON201 require.NotNil(t, copyStat) require.NotEqual(t, objStat.PhysicalAddress, copyStat.PhysicalAddress) @@ -4297,7 +4108,7 @@ func TestController_CopyObjectHandler(t *testing.T) { }) verifyResponseOK(t, copyResp, err) - // Verify creation path, date and physical address are different + // Verify the creation path, date and physical address are different copyStat := copyResp.JSON201 require.NotNil(t, copyStat) require.NotEmpty(t, copyStat.PhysicalAddress) @@ -4337,7 +4148,7 @@ func TestController_CopyObjectHandler(t *testing.T) { }) verifyResponseOK(t, copyResp, err) - // Verify creation path, date and physical address are different + // Verify the creation path, date and physical address are different copyStat := copyResp.JSON201 require.NotNil(t, copyStat) require.NotEmpty(t, copyStat.PhysicalAddress) @@ -4566,7 +4377,7 @@ func TestController_BranchProtectionRules(t *testing.T) { t.Fatalf("CreateBranchProtectionRulePreflightWithResponse expected %d, got %d", tc.expectedHttpStatus, respPreflight.StatusCode()) } - // result of an actual call to the endpoint should have the same result + // the result of an actual call to the endpoint should have the same result resp, err := tc.clt.CreateBranchProtectionRuleWithResponse(currCtx, repo, apigen.CreateBranchProtectionRuleJSONRequestBody{ Pattern: "main", }) @@ -4625,7 +4436,7 @@ func TestController_GarbageCollectionRules(t *testing.T) { t.Fatalf("SetGarbageCollectionRulesPreflightWithResponse expected %d, got %d", tc.expectedHttpStatus, respPreflight.StatusCode()) } - // result of an actual call to the 
endpoint should have the same result + // the result of an actual call to the endpoint should have the same result resp, err := tc.clt.SetGarbageCollectionRulesWithResponse(currCtx, repo, apigen.SetGarbageCollectionRulesJSONRequestBody{ Branches: []apigen.GarbageCollectionRule{{BranchId: "main", RetentionDays: 1}}, DefaultRetentionDays: 5, }) diff --git a/webui/src/lib/api/index.js b/webui/src/lib/api/index.js index 172de1c4c53..c6e00c03388 100644 --- a/webui/src/lib/api/index.js +++ b/webui/src/lib/api/index.js @@ -992,10 +992,8 @@ class BranchProtectionRules { async createRulePreflight(repoID) { const response = await apiRequest(`/repositories/${encodeURIComponent(repoID)}/branch_protection/set_allowed`); - if (response.status !== 204) { - return false; - } - return true; + return response.status === 204; + } async createRule(repoID, pattern) { @@ -1023,32 +1021,6 @@ class BranchProtectionRules { } -class Ranges { - async createRange(repoID, fromSourceURI, after, prepend, continuation_token = "", staging_token="") { - const response = await apiRequest(`/repositories/${repoID}/branches/ranges`, { - method: 'POST', - body: JSON.stringify({fromSourceURI, after, prepend, continuation_token, staging_token}), - }); - if (response.status !== 201) { - throw new Error(await extractError(response)); - } - return response.json(); - } -} - -class MetaRanges { - async createMetaRange(repoID, ranges) { - const response = await apiRequest(`/repositories/${repoID}/branches/metaranges`, { - method: 'POST', - body: JSON.stringify({ranges}), - }); - if (response.status !== 201) { - throw new Error(await extractError(response)); - } - return response.json(); - } -} - class Templates { async expandTemplate(templateLocation, params) { const urlParams = new URLSearchParams(); @@ -1179,8 +1151,6 @@ export const actions = new Actions(); export const retention = new Retention(); export const config = new Config(); export const branchProtectionRules = new BranchProtectionRules(); -export const ranges = new Ranges(); -export const metaRanges = new MetaRanges(); export const templates = new Templates(); export const statistics = new Statistics(); export const staging = new Staging();
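
Note: with IngestRange and CreateMetaRange removed, imports go only through the ImportStart / ImportStatus endpoints. Below is a minimal, illustrative Go sketch of that flow, distilled from the updated testImportNew in esti/import_test.go above. It is not part of this patch: it assumes an already-initialized apigen.ClientWithResponsesInterface and an existing target branch; the helper name runImport, the destPrefix parameter, the Destination field, and the generated-client import path are assumptions and may differ from the actual generated code.

// runImport starts an import of sourceURI into branch and polls ImportStatus
// until the server reports completion. Sketch only; see lead-in for assumptions.
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/treeverse/lakefs/pkg/api/apigen" // assumed import path for the generated client
	"github.com/treeverse/lakefs/pkg/catalog"
)

func runImport(ctx context.Context, client apigen.ClientWithResponsesInterface, repo, branch, sourceURI, destPrefix string) (string, error) {
	// Build the import request: a commit description plus the source locations to ingest.
	body := apigen.ImportStartJSONRequestBody{
		Commit: apigen.CommitCreation{
			Message: "created by import",
			Metadata: &apigen.CommitCreation_Metadata{
				AdditionalProperties: map[string]string{"created_by": "import"},
			},
		},
		Paths: []apigen.ImportLocation{{
			Destination: destPrefix, // prefix inside the branch to import into (assumed field)
			Path:        sourceURI,
			Type:        catalog.ImportPathTypePrefix,
		}},
	}

	startResp, err := client.ImportStartWithResponse(ctx, repo, branch, body)
	if err != nil {
		return "", err
	}
	if startResp.JSON202 == nil {
		return "", fmt.Errorf("import start failed: status %d", startResp.StatusCode())
	}
	importID := startResp.JSON202.Id

	// Poll the import status until it completes or reports an error.
	ticker := time.NewTicker(2 * time.Second)
	defer ticker.Stop()
	for range ticker.C {
		statusResp, err := client.ImportStatusWithResponse(ctx, repo, branch, &apigen.ImportStatusParams{Id: importID})
		if err != nil {
			return "", err
		}
		status := statusResp.JSON200
		if status == nil {
			return "", fmt.Errorf("import status failed: status %d", statusResp.StatusCode())
		}
		if status.Error != nil {
			return "", fmt.Errorf("import %s failed: %v", importID, status.Error)
		}
		if status.Completed {
			break
		}
	}
	return importID, nil
}

A caller would typically create the target branch first (as the test does via CreateBranchWithResponse) and can abort a long-running import with ImportCancel, which this sketch omits.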