diff --git a/.gitallowed b/.gitallowed
index 50b018b1e53..163237e84da 100644
--- a/.gitallowed
+++ b/.gitallowed
@@ -2,5 +2,6 @@ AKIAIO5FODNN7EXAMPLE
AKIAIOSFODNN3EXAMPLE
AKIAIOSFODNN7EXAMPLE
AKIAJVHTOKZWGCD2QQYQ
+AKIAJZZZZZZZZZZZZZZQ
wJalrXUtnFEMI/K3MDENG/bPxRfiCYEXAMPLEKEY
wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
diff --git a/.github/workflows/compatibility-tests.yaml b/.github/workflows/compatibility-tests.yaml
index 31f514274f9..024bcc308fe 100644
--- a/.github/workflows/compatibility-tests.yaml
+++ b/.github/workflows/compatibility-tests.yaml
@@ -121,7 +121,7 @@ jobs:
matrix:
# Removing a version from this list means the published client is no longer compatible with
# that lakeFS version.
- lakefs_version: [ 0.89.1, 0.90.0, 0.91.0, 0.92.0, 0.93.0, 0.94.1, 0.95.0, 0.96.0, 0.96.1, 0.97.4, 0.97.5, 0.98.0, 0.99.0, 0.100.0, 0.101.0, 0.102.0, 0.102.1, 0.102.2, 0.103.0, 0.104.0, 0.105.0, 0.106.2, 0.107.0, 0.107.1, 0.108.0 ]
+ lakefs_version: [ 0.89.1, 0.90.0, 0.91.0, 0.92.0, 0.93.0, 0.94.1, 0.95.0, 0.96.0, 0.96.1, 0.97.4, 0.97.5, 0.98.0, 0.99.0, 0.100.0, 0.101.0, 0.102.0, 0.102.1, 0.102.2, 0.103.0, 0.104.0, 0.105.0, 0.106.2, 0.107.0, 0.107.1, 0.108.0, 0.109.0, 0.110.0 ]
runs-on: ubuntu-20.04
env:
TAG: ${{ matrix.lakefs_version }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fd8a8bf3537..e408118c779 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,9 +2,20 @@
## UNRELEASED
+# v0.110.0
+
:new: What's new:
-- Restrict lakectl local to common prefixes only (#6510)
-- Added URL parser to lua runtime (#6597)
+- Upgrade code to use AWS SDK go v2 (#6486)
+- Actions: Lua package to parse URL (#6597)
+- UI: Commit info consistent across screens (#6593)
+- API: Update APIs tag for internal use (#6582)
+- lakectl: dbt and metastore commands marked as deprecated (#6565)
+- Removed deprecated: expand template API (#6587)
+- Removed deprecated: remove the update and forgot password APIs (#6591)
+
+:bug: Bugs fixed:
+- Fix lakectl local verify bad path error on Windows (#6602)
+- Fix Azure pre-signed URL for blob imported from different account (#6594)
# v0.109.0
diff --git a/api/swagger.yml b/api/swagger.yml
index 9ebc23a52eb..3b9ebba8be6 100644
--- a/api/swagger.yml
+++ b/api/swagger.yml
@@ -5,7 +5,7 @@ info:
title: lakeFS API
license:
name: "Apache 2.0"
- url: http://www.apache.org/licenses/LICENSE-2.0.html
+ url: https://www.apache.org/licenses/LICENSE-2.0.html
version: 0.1.0
servers:
@@ -151,25 +151,6 @@ components:
minimum: 0
description: Maximal number of entries per page
- ImportPagination:
- type: object
- required:
- - has_more
- - last_key
- properties:
- has_more:
- type: boolean
- description: More keys to be ingested.
- continuation_token:
- type: string
- description: Opaque. Token used to import the next range.
- last_key:
- type: string
- description: Last object store key that was ingested.
- staging_token:
- type: string
- description: Staging token for skipped objects during ingest
-
Repository:
type: object
required:
@@ -290,9 +271,9 @@ components:
type: integer
format: int64
description: |
- If present and nonzero, physical_address is a presigned URL and
+ If present and nonzero, physical_address is a pre-signed URL and
will expire at this Unix Epoch time. This will be shorter than
- the presigned URL lifetime if an authentication token is about
+ the pre-signed URL lifetime if an authentication token is about
to expire.
This field is *optional*.
@@ -691,15 +672,13 @@ components:
properties:
id:
type: string
- description: a unique identifier for the user. In password-based authentication, this is the email.
+ description: a unique identifier for the user.
creation_date:
type: integer
format: int64
description: Unix Epoch in seconds
friendly_name:
type: string
- email:
- type: string
CurrentUser:
type: object
@@ -709,20 +688,12 @@ components:
user:
$ref: "#/components/schemas/User"
- ForgotPasswordRequest:
- type: object
- required:
- - email
- properties:
- email:
- type: string
-
UserCreation:
type: object
properties:
id:
type: string
- description: a unique identifier for the user. In password-based authentication, this is the email.
+ description: a unique identifier for the user.
invite_user:
type: boolean
required:
@@ -821,21 +792,6 @@ components:
- featureUpdates
- securityUpdates
- UpdatePasswordByToken:
- type: object
- properties:
- token:
- description: token used for authentication
- type: string
- newPassword:
- description: new password to update
- type: string
- email:
- description: optional user email to match the token for verification
- type: string
- required:
- - token
- - newPassword
Credentials:
type: object
@@ -1157,14 +1113,14 @@ components:
presigned_url:
type: string
nullable: true
- description: if presign=true is passed in the request, this field will contain a presigned URL to use when uploading
+ description: if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading
presigned_url_expiry:
type: integer
format: int64
description: |
- If present and nonzero, physical_address is a presigned URL and
+ If present and nonzero, physical_address is a pre-signed URL and
will expire at this Unix Epoch time. This will be shorter than
- the presigned URL lifetime if an authentication token is about
+ the pre-signed URL lifetime if an authentication token is about
to expire.
This field is *optional*.
@@ -1292,7 +1248,7 @@ components:
description: Path type, can either be 'common_prefix' or 'object'
path:
type: string
- description: A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type.
+ description: A source location to import path or to a single object. Must match the lakeFS installation blockstore type.
example: s3://my-bucket/production/collections/
destination:
type: string
@@ -1320,32 +1276,6 @@ components:
destination: collections/file1
type: object
- StageRangeCreation:
- type: object
- required:
- - fromSourceURI
- - after
- - prepend
- properties:
- fromSourceURI:
- type: string
- description: The source location of the ingested files. Must match the lakeFS installation blockstore type.
- example: s3://my-bucket/production/collections/
- after:
- type: string
- description: Only objects after this key would be ingested.
- example: production/collections/some/file.parquet
- prepend:
- type: string
- description: A prefix to prepend to ingested objects.
- example: collections/
- continuation_token:
- type: string
- description: Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key.
- staging_token:
- type: string
- description: Opaque. Client should pass staging_token if received from server on previous request
-
RangeMetadata:
type: object
required:
@@ -1374,15 +1304,7 @@ components:
type: integer
description: Estimated size of the range in bytes
- IngestRangeCreationResponse:
- type: object
- properties:
- range:
- $ref: "#/components/schemas/RangeMetadata"
- pagination:
- $ref: "#/components/schemas/ImportPagination"
-
- ImportStatusResp:
+ ImportStatus:
type: object
properties:
completed:
@@ -1587,48 +1509,7 @@ paths:
default:
$ref: "#/components/responses/ServerError"
- /auth/password:
- post:
- tags:
- - auth
- operationId: updatePassword
- summary: Update user password by reset_password token
- security:
- - cookie_auth: []
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/UpdatePasswordByToken"
- responses:
- 201:
- description: successful reset
- 401:
- $ref: "#/components/responses/Unauthorized"
- default:
- $ref: "#/components/responses/ServerError"
- /auth/password/forgot:
- post:
- tags:
- - auth
- operationId: forgotPassword
- summary: forgot password request initiates the password reset process
- security: []
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/ForgotPasswordRequest"
- responses:
- 204:
- description: No content
- 400:
- $ref: "#/components/responses/BadRequest"
- default:
- $ref: "#/components/responses/ServerError"
/auth/capabilities:
get:
@@ -1955,7 +1836,7 @@ paths:
- $ref: "#/components/parameters/PaginationAmount"
responses:
200:
- description: group memeber list
+ description: group member list
content:
application/json:
schema:
@@ -3476,7 +3357,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/ImportStatusResp"
+ $ref: "#/components/schemas/ImportStatus"
401:
$ref: "#/components/responses/Unauthorized"
404:
@@ -3535,114 +3416,6 @@ paths:
default:
$ref: "#/components/responses/ServerError"
- /repositories/{repository}/branches/metaranges:
- parameters:
- - in: path
- name: repository
- required: true
- schema:
- type: string
- post:
- tags:
- - import
- operationId: createMetaRange
- summary: create a lakeFS metarange file from the given ranges
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/MetaRangeCreation"
- responses:
- 201:
- description: metarange metadata
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/MetaRangeCreationResponse"
- 400:
- $ref: "#/components/responses/ValidationError"
- 401:
- $ref: "#/components/responses/Unauthorized"
- 403:
- $ref: "#/components/responses/Forbidden"
- 404:
- $ref: "#/components/responses/NotFound"
- default:
- $ref: "#/components/responses/ServerError"
-
- /repositories/{repository}/branches/ranges:
- parameters:
- - in: path
- name: repository
- required: true
- schema:
- type: string
- post:
- tags:
- - import
- operationId: ingestRange
- summary: create a lakeFS range file from the source uri
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/StageRangeCreation"
- responses:
- 201:
- description: range metadata
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/IngestRangeCreationResponse"
-
- 400:
- $ref: "#/components/responses/ValidationError"
- 401:
- $ref: "#/components/responses/Unauthorized"
- 404:
- $ref: "#/components/responses/NotFound"
- default:
- $ref: "#/components/responses/ServerError"
-
- /repositories/{repository}/branches/{branch}/update_token:
- parameters:
- - in: path
- name: repository
- required: true
- schema:
- type: string
- - in: path
- name: branch
- required: true
- schema:
- type: string
- put:
- tags:
- - internal
- operationId: updateBranchToken
- summary: modify branch staging token
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/UpdateToken"
- responses:
- 204:
- description: branch updated successfully
- 400:
- $ref: "#/components/responses/ValidationError"
- 401:
- $ref: "#/components/responses/Unauthorized"
- 403:
- $ref: "#/components/responses/Forbidden"
- 404:
- $ref: "#/components/responses/NotFound"
- default:
- $ref: "#/components/responses/ServerError"
-
/repositories/{repository}/branches/{branch}/objects/stage_allowed:
parameters:
- in: path
diff --git a/clients/java/README.md b/clients/java/README.md
index e16dc594fc8..441a6dcf064 100644
--- a/clients/java/README.md
+++ b/clients/java/README.md
@@ -153,7 +153,6 @@ Class | Method | HTTP request | Description
*AuthApi* | [**deleteUser**](docs/AuthApi.md#deleteUser) | **DELETE** /auth/users/{userId} | delete user
*AuthApi* | [**detachPolicyFromGroup**](docs/AuthApi.md#detachPolicyFromGroup) | **DELETE** /auth/groups/{groupId}/policies/{policyId} | detach policy from group
*AuthApi* | [**detachPolicyFromUser**](docs/AuthApi.md#detachPolicyFromUser) | **DELETE** /auth/users/{userId}/policies/{policyId} | detach policy from user
-*AuthApi* | [**forgotPassword**](docs/AuthApi.md#forgotPassword) | **POST** /auth/password/forgot | forgot password request initiates the password reset process
*AuthApi* | [**getCredentials**](docs/AuthApi.md#getCredentials) | **GET** /auth/users/{userId}/credentials/{accessKeyId} | get credentials
*AuthApi* | [**getCurrentUser**](docs/AuthApi.md#getCurrentUser) | **GET** /user | get current user
*AuthApi* | [**getGroup**](docs/AuthApi.md#getGroup) | **GET** /auth/groups/{groupId} | get group
@@ -170,7 +169,6 @@ Class | Method | HTTP request | Description
*AuthApi* | [**listUsers**](docs/AuthApi.md#listUsers) | **GET** /auth/users | list users
*AuthApi* | [**login**](docs/AuthApi.md#login) | **POST** /auth/login | perform a login
*AuthApi* | [**setGroupACL**](docs/AuthApi.md#setGroupACL) | **POST** /auth/groups/{groupId}/acl | set ACL of group
-*AuthApi* | [**updatePassword**](docs/AuthApi.md#updatePassword) | **POST** /auth/password | Update user password by reset_password token
*AuthApi* | [**updatePolicy**](docs/AuthApi.md#updatePolicy) | **PUT** /auth/policies/{policyId} | update policy
*BranchesApi* | [**cherryPick**](docs/BranchesApi.md#cherryPick) | **POST** /repositories/{repository}/branches/{branch}/cherry-pick | Replay the changes from the given commit on the branch
*BranchesApi* | [**createBranch**](docs/BranchesApi.md#createBranch) | **POST** /repositories/{repository}/branches | create branch
@@ -188,11 +186,9 @@ Class | Method | HTTP request | Description
*ExperimentalApi* | [**getOtfDiffs**](docs/ExperimentalApi.md#getOtfDiffs) | **GET** /otf/diffs | get the available Open Table Format diffs
*ExperimentalApi* | [**otfDiff**](docs/ExperimentalApi.md#otfDiff) | **GET** /repositories/{repository}/otf/refs/{left_ref}/diff/{right_ref} | perform otf diff
*HealthCheckApi* | [**healthCheck**](docs/HealthCheckApi.md#healthCheck) | **GET** /healthcheck |
-*ImportApi* | [**createMetaRange**](docs/ImportApi.md#createMetaRange) | **POST** /repositories/{repository}/branches/metaranges | create a lakeFS metarange file from the given ranges
*ImportApi* | [**importCancel**](docs/ImportApi.md#importCancel) | **DELETE** /repositories/{repository}/branches/{branch}/import | cancel ongoing import
*ImportApi* | [**importStart**](docs/ImportApi.md#importStart) | **POST** /repositories/{repository}/branches/{branch}/import | import data from object store
*ImportApi* | [**importStatus**](docs/ImportApi.md#importStatus) | **GET** /repositories/{repository}/branches/{branch}/import | get import status
-*ImportApi* | [**ingestRange**](docs/ImportApi.md#ingestRange) | **POST** /repositories/{repository}/branches/ranges | create a lakeFS range file from the source uri
*InternalApi* | [**createBranchProtectionRulePreflight**](docs/InternalApi.md#createBranchProtectionRulePreflight) | **GET** /repositories/{repository}/branch_protection/set_allowed |
*InternalApi* | [**getAuthCapabilities**](docs/InternalApi.md#getAuthCapabilities) | **GET** /auth/capabilities | list authentication capabilities supported
*InternalApi* | [**getSetupState**](docs/InternalApi.md#getSetupState) | **GET** /setup_lakefs | check if the lakeFS installation is already set up
@@ -200,7 +196,6 @@ Class | Method | HTTP request | Description
*InternalApi* | [**setGarbageCollectionRulesPreflight**](docs/InternalApi.md#setGarbageCollectionRulesPreflight) | **GET** /repositories/{repository}/gc/rules/set_allowed |
*InternalApi* | [**setup**](docs/InternalApi.md#setup) | **POST** /setup_lakefs | setup lakeFS and create a first user
*InternalApi* | [**setupCommPrefs**](docs/InternalApi.md#setupCommPrefs) | **POST** /setup_comm_prefs | setup communications preferences
-*InternalApi* | [**updateBranchToken**](docs/InternalApi.md#updateBranchToken) | **PUT** /repositories/{repository}/branches/{branch}/update_token | modify branch staging token
*InternalApi* | [**uploadObjectPreflight**](docs/InternalApi.md#uploadObjectPreflight) | **GET** /repositories/{repository}/branches/{branch}/objects/stage_allowed |
*MetadataApi* | [**createSymlinkFile**](docs/MetadataApi.md#createSymlinkFile) | **POST** /repositories/{repository}/refs/{branch}/symlink | creates symlink files corresponding to the given directory
*MetadataApi* | [**getMetaRange**](docs/MetadataApi.md#getMetaRange) | **GET** /repositories/{repository}/metadata/meta_range/{meta_range} | return URI to a meta-range file
@@ -267,7 +262,6 @@ Class | Method | HTTP request | Description
- [Error](docs/Error.md)
- [ErrorNoACL](docs/ErrorNoACL.md)
- [FindMergeBaseResult](docs/FindMergeBaseResult.md)
- - [ForgotPasswordRequest](docs/ForgotPasswordRequest.md)
- [GarbageCollectionConfig](docs/GarbageCollectionConfig.md)
- [GarbageCollectionPrepareRequest](docs/GarbageCollectionPrepareRequest.md)
- [GarbageCollectionPrepareResponse](docs/GarbageCollectionPrepareResponse.md)
@@ -281,9 +275,7 @@ Class | Method | HTTP request | Description
- [ImportCreation](docs/ImportCreation.md)
- [ImportCreationResponse](docs/ImportCreationResponse.md)
- [ImportLocation](docs/ImportLocation.md)
- - [ImportPagination](docs/ImportPagination.md)
- - [ImportStatusResp](docs/ImportStatusResp.md)
- - [IngestRangeCreationResponse](docs/IngestRangeCreationResponse.md)
+ - [ImportStatus](docs/ImportStatus.md)
- [InlineObject](docs/InlineObject.md)
- [InlineObject1](docs/InlineObject1.md)
- [LoginConfig](docs/LoginConfig.md)
@@ -318,7 +310,6 @@ Class | Method | HTTP request | Description
- [RevertCreation](docs/RevertCreation.md)
- [Setup](docs/Setup.md)
- [SetupState](docs/SetupState.md)
- - [StageRangeCreation](docs/StageRangeCreation.md)
- [StagingLocation](docs/StagingLocation.md)
- [StagingMetadata](docs/StagingMetadata.md)
- [Statement](docs/Statement.md)
@@ -328,7 +319,6 @@ Class | Method | HTTP request | Description
- [StorageURI](docs/StorageURI.md)
- [TagCreation](docs/TagCreation.md)
- [UnderlyingObjectProperties](docs/UnderlyingObjectProperties.md)
- - [UpdatePasswordByToken](docs/UpdatePasswordByToken.md)
- [UpdateToken](docs/UpdateToken.md)
- [User](docs/User.md)
- [UserCreation](docs/UserCreation.md)
diff --git a/clients/java/api/openapi.yaml b/clients/java/api/openapi.yaml
index e4085851a66..1c53abe4a5d 100644
--- a/clients/java/api/openapi.yaml
+++ b/clients/java/api/openapi.yaml
@@ -3,7 +3,7 @@ info:
description: lakeFS HTTP API
license:
name: Apache 2.0
- url: http://www.apache.org/licenses/LICENSE-2.0.html
+ url: https://www.apache.org/licenses/LICENSE-2.0.html
title: lakeFS API
version: 0.1.0
servers:
@@ -166,67 +166,6 @@ paths:
- auth
x-contentType: application/json
x-accepts: application/json
- /auth/password:
- post:
- operationId: updatePassword
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/UpdatePasswordByToken'
- required: true
- responses:
- "201":
- description: successful reset
- "401":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Unauthorized
- default:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Internal Server Error
- security:
- - cookie_auth: []
- summary: Update user password by reset_password token
- tags:
- - auth
- x-contentType: application/json
- x-accepts: application/json
- /auth/password/forgot:
- post:
- operationId: forgotPassword
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/ForgotPasswordRequest'
- required: true
- responses:
- "204":
- description: No content
- "400":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Bad Request
- default:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Internal Server Error
- security: []
- summary: forgot password request initiates the password reset process
- tags:
- - auth
- x-contentType: application/json
- x-accepts: application/json
/auth/capabilities:
get:
operationId: getAuthCapabilities
@@ -855,7 +794,7 @@ paths:
application/json:
schema:
$ref: '#/components/schemas/UserList'
- description: group memeber list
+ description: group member list
"401":
content:
application/json:
@@ -3687,7 +3626,7 @@ paths:
content:
application/json:
schema:
- $ref: '#/components/schemas/ImportStatusResp'
+ $ref: '#/components/schemas/ImportStatus'
description: import status
"401":
content:
@@ -3770,180 +3709,6 @@ paths:
- import
x-contentType: application/json
x-accepts: application/json
- /repositories/{repository}/branches/metaranges:
- post:
- operationId: createMetaRange
- parameters:
- - explode: false
- in: path
- name: repository
- required: true
- schema:
- type: string
- style: simple
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/MetaRangeCreation'
- required: true
- responses:
- "201":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/MetaRangeCreationResponse'
- description: metarange metadata
- "400":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Validation Error
- "401":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Unauthorized
- "403":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Forbidden
- "404":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Resource Not Found
- default:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Internal Server Error
- summary: create a lakeFS metarange file from the given ranges
- tags:
- - import
- x-contentType: application/json
- x-accepts: application/json
- /repositories/{repository}/branches/ranges:
- post:
- operationId: ingestRange
- parameters:
- - explode: false
- in: path
- name: repository
- required: true
- schema:
- type: string
- style: simple
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/StageRangeCreation'
- required: true
- responses:
- "201":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/IngestRangeCreationResponse'
- description: range metadata
- "400":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Validation Error
- "401":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Unauthorized
- "404":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Resource Not Found
- default:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Internal Server Error
- summary: create a lakeFS range file from the source uri
- tags:
- - import
- x-contentType: application/json
- x-accepts: application/json
- /repositories/{repository}/branches/{branch}/update_token:
- put:
- operationId: updateBranchToken
- parameters:
- - explode: false
- in: path
- name: repository
- required: true
- schema:
- type: string
- style: simple
- - explode: false
- in: path
- name: branch
- required: true
- schema:
- type: string
- style: simple
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/UpdateToken'
- required: true
- responses:
- "204":
- description: branch updated successfully
- "400":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Validation Error
- "401":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Unauthorized
- "403":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Forbidden
- "404":
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Resource Not Found
- default:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/Error'
- description: Internal Server Error
- summary: modify branch staging token
- tags:
- - internal
- x-contentType: application/json
- x-accepts: application/json
/repositories/{repository}/branches/{branch}/objects/stage_allowed:
get:
operationId: uploadObjectPreflight
@@ -5669,29 +5434,6 @@ components:
- next_offset
- results
type: object
- ImportPagination:
- example:
- continuation_token: continuation_token
- staging_token: staging_token
- has_more: true
- last_key: last_key
- properties:
- has_more:
- description: More keys to be ingested.
- type: boolean
- continuation_token:
- description: Opaque. Token used to import the next range.
- type: string
- last_key:
- description: Last object store key that was ingested.
- type: string
- staging_token:
- description: Staging token for skipped objects during ingest
- type: string
- required:
- - has_more
- - last_key
- type: object
Repository:
example:
default_branch: default_branch
@@ -5844,9 +5586,9 @@ components:
type: string
physical_address_expiry:
description: |
- If present and nonzero, physical_address is a presigned URL and
+ If present and nonzero, physical_address is a pre-signed URL and
will expire at this Unix Epoch time. This will be shorter than
- the presigned URL lifetime if an authentication token is about
+ the pre-signed URL lifetime if an authentication token is about
to expire.
This field is *optional*.
@@ -6431,11 +6173,9 @@ components:
friendly_name: friendly_name
id: id
creation_date: 0
- email: email
properties:
id:
- description: a unique identifier for the user. In password-based authentication,
- this is the email.
+ description: a unique identifier for the user.
type: string
creation_date:
description: Unix Epoch in seconds
@@ -6443,8 +6183,6 @@ components:
type: integer
friendly_name:
type: string
- email:
- type: string
required:
- creation_date
- id
@@ -6455,30 +6193,19 @@ components:
friendly_name: friendly_name
id: id
creation_date: 0
- email: email
properties:
user:
$ref: '#/components/schemas/User'
required:
- user
type: object
- ForgotPasswordRequest:
- example:
- email: email
- properties:
- email:
- type: string
- required:
- - email
- type: object
UserCreation:
example:
invite_user: true
id: id
properties:
id:
- description: a unique identifier for the user. In password-based authentication,
- this is the email.
+ description: a unique identifier for the user.
type: string
invite_user:
type: boolean
@@ -6611,25 +6338,6 @@ components:
- featureUpdates
- securityUpdates
type: object
- UpdatePasswordByToken:
- example:
- newPassword: newPassword
- email: email
- token: token
- properties:
- token:
- description: token used for authentication
- type: string
- newPassword:
- description: new password to update
- type: string
- email:
- description: optional user email to match the token for verification
- type: string
- required:
- - newPassword
- - token
- type: object
Credentials:
example:
access_key_id: access_key_id
@@ -6746,11 +6454,9 @@ components:
- friendly_name: friendly_name
id: id
creation_date: 0
- email: email
- friendly_name: friendly_name
id: id
creation_date: 0
- email: email
properties:
pagination:
$ref: '#/components/schemas/Pagination'
@@ -7122,14 +6828,14 @@ components:
type: string
presigned_url:
description: if presign=true is passed in the request, this field will contain
- a presigned URL to use when uploading
+ a pre-signed URL to use when uploading
nullable: true
type: string
presigned_url_expiry:
description: |
- If present and nonzero, physical_address is a presigned URL and
+ If present and nonzero, physical_address is a pre-signed URL and
will expire at this Unix Epoch time. This will be shorter than
- the presigned URL lifetime if an authentication token is about
+ the pre-signed URL lifetime if an authentication token is about
to expire.
This field is *optional*.
@@ -7288,7 +6994,7 @@ components:
- object
type: string
path:
- description: A source location to ingested path or to a single object. Must
+ description: A source location to import path or to a single object. Must
match the lakeFS installation blockstore type.
example: s3://my-bucket/production/collections/
type: string
@@ -7321,47 +7027,7 @@ components:
- commit
- paths
type: object
- StageRangeCreation:
- example:
- fromSourceURI: s3://my-bucket/production/collections/
- continuation_token: continuation_token
- staging_token: staging_token
- prepend: collections/
- after: production/collections/some/file.parquet
- properties:
- fromSourceURI:
- description: The source location of the ingested files. Must match the lakeFS
- installation blockstore type.
- example: s3://my-bucket/production/collections/
- type: string
- after:
- description: Only objects after this key would be ingested.
- example: production/collections/some/file.parquet
- type: string
- prepend:
- description: A prefix to prepend to ingested objects.
- example: collections/
- type: string
- continuation_token:
- description: Opaque. Client should pass the continuation_token received
- from server to continue creation ranges from the same key.
- type: string
- staging_token:
- description: Opaque. Client should pass staging_token if received from server
- on previous request
- type: string
- required:
- - after
- - fromSourceURI
- - prepend
- type: object
RangeMetadata:
- example:
- max_key: production/collections/some/file_8229.parquet
- count: 0
- estimated_size: 6
- id: 480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c
- min_key: production/collections/some/file_1.parquet
properties:
id:
description: ID of the range.
@@ -7388,26 +7054,7 @@ components:
- max_key
- min_key
type: object
- IngestRangeCreationResponse:
- example:
- pagination:
- continuation_token: continuation_token
- staging_token: staging_token
- has_more: true
- last_key: last_key
- range:
- max_key: production/collections/some/file_8229.parquet
- count: 0
- estimated_size: 6
- id: 480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c
- min_key: production/collections/some/file_1.parquet
- properties:
- range:
- $ref: '#/components/schemas/RangeMetadata'
- pagination:
- $ref: '#/components/schemas/ImportPagination'
- type: object
- ImportStatusResp:
+ ImportStatus:
example:
update_time: 2000-01-23T04:56:07.000+00:00
metarange_id: metarange_id
@@ -7457,18 +7104,6 @@ components:
- id
type: object
MetaRangeCreation:
- example:
- ranges:
- - max_key: production/collections/some/file_8229.parquet
- count: 0
- estimated_size: 6
- id: 480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c
- min_key: production/collections/some/file_1.parquet
- - max_key: production/collections/some/file_8229.parquet
- count: 0
- estimated_size: 6
- id: 480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c
- min_key: production/collections/some/file_1.parquet
properties:
ranges:
items:
@@ -7479,16 +7114,12 @@ components:
- ranges
type: object
MetaRangeCreationResponse:
- example:
- id: id
properties:
id:
description: The id of the created metarange
type: string
type: object
UpdateToken:
- example:
- staging_token: staging_token
properties:
staging_token:
type: string
diff --git a/clients/java/docs/AuthApi.md b/clients/java/docs/AuthApi.md
index 02dd8998e57..955bdd8a725 100644
--- a/clients/java/docs/AuthApi.md
+++ b/clients/java/docs/AuthApi.md
@@ -18,7 +18,6 @@ Method | HTTP request | Description
[**deleteUser**](AuthApi.md#deleteUser) | **DELETE** /auth/users/{userId} | delete user
[**detachPolicyFromGroup**](AuthApi.md#detachPolicyFromGroup) | **DELETE** /auth/groups/{groupId}/policies/{policyId} | detach policy from group
[**detachPolicyFromUser**](AuthApi.md#detachPolicyFromUser) | **DELETE** /auth/users/{userId}/policies/{policyId} | detach policy from user
-[**forgotPassword**](AuthApi.md#forgotPassword) | **POST** /auth/password/forgot | forgot password request initiates the password reset process
[**getCredentials**](AuthApi.md#getCredentials) | **GET** /auth/users/{userId}/credentials/{accessKeyId} | get credentials
[**getCurrentUser**](AuthApi.md#getCurrentUser) | **GET** /user | get current user
[**getGroup**](AuthApi.md#getGroup) | **GET** /auth/groups/{groupId} | get group
@@ -35,7 +34,6 @@ Method | HTTP request | Description
[**listUsers**](AuthApi.md#listUsers) | **GET** /auth/users | list users
[**login**](AuthApi.md#login) | **POST** /auth/login | perform a login
[**setGroupACL**](AuthApi.md#setGroupACL) | **POST** /auth/groups/{groupId}/acl | set ACL of group
-[**updatePassword**](AuthApi.md#updatePassword) | **POST** /auth/password | Update user password by reset_password token
[**updatePolicy**](AuthApi.md#updatePolicy) | **PUT** /auth/policies/{policyId} | update policy
@@ -1319,67 +1317,6 @@ null (empty response body)
**404** | Resource Not Found | - |
**0** | Internal Server Error | - |
-
-# **forgotPassword**
-> forgotPassword(forgotPasswordRequest)
-
-forgot password request initiates the password reset process
-
-### Example
-```java
-// Import classes:
-import io.lakefs.clients.api.ApiClient;
-import io.lakefs.clients.api.ApiException;
-import io.lakefs.clients.api.Configuration;
-import io.lakefs.clients.api.models.*;
-import io.lakefs.clients.api.AuthApi;
-
-public class Example {
- public static void main(String[] args) {
- ApiClient defaultClient = Configuration.getDefaultApiClient();
- defaultClient.setBasePath("http://localhost/api/v1");
-
- AuthApi apiInstance = new AuthApi(defaultClient);
- ForgotPasswordRequest forgotPasswordRequest = new ForgotPasswordRequest(); // ForgotPasswordRequest |
- try {
- apiInstance.forgotPassword(forgotPasswordRequest);
- } catch (ApiException e) {
- System.err.println("Exception when calling AuthApi#forgotPassword");
- System.err.println("Status code: " + e.getCode());
- System.err.println("Reason: " + e.getResponseBody());
- System.err.println("Response headers: " + e.getResponseHeaders());
- e.printStackTrace();
- }
- }
-}
-```
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **forgotPasswordRequest** | [**ForgotPasswordRequest**](ForgotPasswordRequest.md)| |
-
-### Return type
-
-null (empty response body)
-
-### Authorization
-
-No authorization required
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-### HTTP response details
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**204** | No content | - |
-**400** | Bad Request | - |
-**0** | Internal Server Error | - |
-
# **getCredentials**
> Credentials getCredentials(userId, accessKeyId)
@@ -2013,7 +1950,7 @@ Name | Type | Description | Notes
### HTTP response details
| Status code | Description | Response headers |
|-------------|-------------|------------------|
-**200** | group memeber list | - |
+**200** | group member list | - |
**401** | Unauthorized | - |
**0** | Internal Server Error | - |
@@ -2843,74 +2780,6 @@ null (empty response body)
**404** | Resource Not Found | - |
**0** | Internal Server Error | - |
-
-# **updatePassword**
-> updatePassword(updatePasswordByToken)
-
-Update user password by reset_password token
-
-### Example
-```java
-// Import classes:
-import io.lakefs.clients.api.ApiClient;
-import io.lakefs.clients.api.ApiException;
-import io.lakefs.clients.api.Configuration;
-import io.lakefs.clients.api.auth.*;
-import io.lakefs.clients.api.models.*;
-import io.lakefs.clients.api.AuthApi;
-
-public class Example {
- public static void main(String[] args) {
- ApiClient defaultClient = Configuration.getDefaultApiClient();
- defaultClient.setBasePath("http://localhost/api/v1");
-
- // Configure API key authorization: cookie_auth
- ApiKeyAuth cookie_auth = (ApiKeyAuth) defaultClient.getAuthentication("cookie_auth");
- cookie_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //cookie_auth.setApiKeyPrefix("Token");
-
- AuthApi apiInstance = new AuthApi(defaultClient);
- UpdatePasswordByToken updatePasswordByToken = new UpdatePasswordByToken(); // UpdatePasswordByToken |
- try {
- apiInstance.updatePassword(updatePasswordByToken);
- } catch (ApiException e) {
- System.err.println("Exception when calling AuthApi#updatePassword");
- System.err.println("Status code: " + e.getCode());
- System.err.println("Reason: " + e.getResponseBody());
- System.err.println("Response headers: " + e.getResponseHeaders());
- e.printStackTrace();
- }
- }
-}
-```
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **updatePasswordByToken** | [**UpdatePasswordByToken**](UpdatePasswordByToken.md)| |
-
-### Return type
-
-null (empty response body)
-
-### Authorization
-
-[cookie_auth](../README.md#cookie_auth)
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-### HTTP response details
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**201** | successful reset | - |
-**401** | Unauthorized | - |
-**0** | Internal Server Error | - |
-
# **updatePolicy**
> Policy updatePolicy(policyId, policy)
diff --git a/clients/java/docs/ForgotPasswordRequest.md b/clients/java/docs/ForgotPasswordRequest.md
deleted file mode 100644
index 075dbc408d0..00000000000
--- a/clients/java/docs/ForgotPasswordRequest.md
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-# ForgotPasswordRequest
-
-
-## Properties
-
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**email** | **String** | |
-
-
-
diff --git a/clients/java/docs/ImportApi.md b/clients/java/docs/ImportApi.md
index 1153902fe38..1746e39662f 100644
--- a/clients/java/docs/ImportApi.md
+++ b/clients/java/docs/ImportApi.md
@@ -4,108 +4,11 @@ All URIs are relative to *http://localhost/api/v1*
Method | HTTP request | Description
------------- | ------------- | -------------
-[**createMetaRange**](ImportApi.md#createMetaRange) | **POST** /repositories/{repository}/branches/metaranges | create a lakeFS metarange file from the given ranges
[**importCancel**](ImportApi.md#importCancel) | **DELETE** /repositories/{repository}/branches/{branch}/import | cancel ongoing import
[**importStart**](ImportApi.md#importStart) | **POST** /repositories/{repository}/branches/{branch}/import | import data from object store
[**importStatus**](ImportApi.md#importStatus) | **GET** /repositories/{repository}/branches/{branch}/import | get import status
-[**ingestRange**](ImportApi.md#ingestRange) | **POST** /repositories/{repository}/branches/ranges | create a lakeFS range file from the source uri
-
-# **createMetaRange**
-> MetaRangeCreationResponse createMetaRange(repository, metaRangeCreation)
-
-create a lakeFS metarange file from the given ranges
-
-### Example
-```java
-// Import classes:
-import io.lakefs.clients.api.ApiClient;
-import io.lakefs.clients.api.ApiException;
-import io.lakefs.clients.api.Configuration;
-import io.lakefs.clients.api.auth.*;
-import io.lakefs.clients.api.models.*;
-import io.lakefs.clients.api.ImportApi;
-
-public class Example {
- public static void main(String[] args) {
- ApiClient defaultClient = Configuration.getDefaultApiClient();
- defaultClient.setBasePath("http://localhost/api/v1");
-
- // Configure HTTP basic authorization: basic_auth
- HttpBasicAuth basic_auth = (HttpBasicAuth) defaultClient.getAuthentication("basic_auth");
- basic_auth.setUsername("YOUR USERNAME");
- basic_auth.setPassword("YOUR PASSWORD");
-
- // Configure API key authorization: cookie_auth
- ApiKeyAuth cookie_auth = (ApiKeyAuth) defaultClient.getAuthentication("cookie_auth");
- cookie_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //cookie_auth.setApiKeyPrefix("Token");
-
- // Configure HTTP bearer authorization: jwt_token
- HttpBearerAuth jwt_token = (HttpBearerAuth) defaultClient.getAuthentication("jwt_token");
- jwt_token.setBearerToken("BEARER TOKEN");
-
- // Configure API key authorization: oidc_auth
- ApiKeyAuth oidc_auth = (ApiKeyAuth) defaultClient.getAuthentication("oidc_auth");
- oidc_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //oidc_auth.setApiKeyPrefix("Token");
-
- // Configure API key authorization: saml_auth
- ApiKeyAuth saml_auth = (ApiKeyAuth) defaultClient.getAuthentication("saml_auth");
- saml_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //saml_auth.setApiKeyPrefix("Token");
-
- ImportApi apiInstance = new ImportApi(defaultClient);
- String repository = "repository_example"; // String |
- MetaRangeCreation metaRangeCreation = new MetaRangeCreation(); // MetaRangeCreation |
- try {
- MetaRangeCreationResponse result = apiInstance.createMetaRange(repository, metaRangeCreation);
- System.out.println(result);
- } catch (ApiException e) {
- System.err.println("Exception when calling ImportApi#createMetaRange");
- System.err.println("Status code: " + e.getCode());
- System.err.println("Reason: " + e.getResponseBody());
- System.err.println("Response headers: " + e.getResponseHeaders());
- e.printStackTrace();
- }
- }
-}
-```
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **repository** | **String**| |
- **metaRangeCreation** | [**MetaRangeCreation**](MetaRangeCreation.md)| |
-
-### Return type
-
-[**MetaRangeCreationResponse**](MetaRangeCreationResponse.md)
-
-### Authorization
-
-[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth)
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-### HTTP response details
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**201** | metarange metadata | - |
-**400** | Validation Error | - |
-**401** | Unauthorized | - |
-**403** | Forbidden | - |
-**404** | Resource Not Found | - |
-**0** | Internal Server Error | - |
-
# **importCancel**
> importCancel(repository, branch, id)
@@ -300,7 +203,7 @@ Name | Type | Description | Notes
# **importStatus**
-> ImportStatusResp importStatus(repository, branch, id)
+> ImportStatus importStatus(repository, branch, id)
get import status
@@ -351,7 +254,7 @@ public class Example {
String branch = "branch_example"; // String |
String id = "id_example"; // String | Unique identifier of the import process
try {
- ImportStatusResp result = apiInstance.importStatus(repository, branch, id);
+ ImportStatus result = apiInstance.importStatus(repository, branch, id);
System.out.println(result);
} catch (ApiException e) {
System.err.println("Exception when calling ImportApi#importStatus");
@@ -374,7 +277,7 @@ Name | Type | Description | Notes
### Return type
-[**ImportStatusResp**](ImportStatusResp.md)
+[**ImportStatus**](ImportStatus.md)
### Authorization
@@ -393,97 +296,3 @@ Name | Type | Description | Notes
**404** | Resource Not Found | - |
**0** | Internal Server Error | - |
-
-# **ingestRange**
-> IngestRangeCreationResponse ingestRange(repository, stageRangeCreation)
-
-create a lakeFS range file from the source uri
-
-### Example
-```java
-// Import classes:
-import io.lakefs.clients.api.ApiClient;
-import io.lakefs.clients.api.ApiException;
-import io.lakefs.clients.api.Configuration;
-import io.lakefs.clients.api.auth.*;
-import io.lakefs.clients.api.models.*;
-import io.lakefs.clients.api.ImportApi;
-
-public class Example {
- public static void main(String[] args) {
- ApiClient defaultClient = Configuration.getDefaultApiClient();
- defaultClient.setBasePath("http://localhost/api/v1");
-
- // Configure HTTP basic authorization: basic_auth
- HttpBasicAuth basic_auth = (HttpBasicAuth) defaultClient.getAuthentication("basic_auth");
- basic_auth.setUsername("YOUR USERNAME");
- basic_auth.setPassword("YOUR PASSWORD");
-
- // Configure API key authorization: cookie_auth
- ApiKeyAuth cookie_auth = (ApiKeyAuth) defaultClient.getAuthentication("cookie_auth");
- cookie_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //cookie_auth.setApiKeyPrefix("Token");
-
- // Configure HTTP bearer authorization: jwt_token
- HttpBearerAuth jwt_token = (HttpBearerAuth) defaultClient.getAuthentication("jwt_token");
- jwt_token.setBearerToken("BEARER TOKEN");
-
- // Configure API key authorization: oidc_auth
- ApiKeyAuth oidc_auth = (ApiKeyAuth) defaultClient.getAuthentication("oidc_auth");
- oidc_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //oidc_auth.setApiKeyPrefix("Token");
-
- // Configure API key authorization: saml_auth
- ApiKeyAuth saml_auth = (ApiKeyAuth) defaultClient.getAuthentication("saml_auth");
- saml_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //saml_auth.setApiKeyPrefix("Token");
-
- ImportApi apiInstance = new ImportApi(defaultClient);
- String repository = "repository_example"; // String |
- StageRangeCreation stageRangeCreation = new StageRangeCreation(); // StageRangeCreation |
- try {
- IngestRangeCreationResponse result = apiInstance.ingestRange(repository, stageRangeCreation);
- System.out.println(result);
- } catch (ApiException e) {
- System.err.println("Exception when calling ImportApi#ingestRange");
- System.err.println("Status code: " + e.getCode());
- System.err.println("Reason: " + e.getResponseBody());
- System.err.println("Response headers: " + e.getResponseHeaders());
- e.printStackTrace();
- }
- }
-}
-```
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **repository** | **String**| |
- **stageRangeCreation** | [**StageRangeCreation**](StageRangeCreation.md)| |
-
-### Return type
-
-[**IngestRangeCreationResponse**](IngestRangeCreationResponse.md)
-
-### Authorization
-
-[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth)
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-### HTTP response details
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**201** | range metadata | - |
-**400** | Validation Error | - |
-**401** | Unauthorized | - |
-**404** | Resource Not Found | - |
-**0** | Internal Server Error | - |
-
diff --git a/clients/java/docs/ImportLocation.md b/clients/java/docs/ImportLocation.md
index fdabf7bae94..bbea056cd4e 100644
--- a/clients/java/docs/ImportLocation.md
+++ b/clients/java/docs/ImportLocation.md
@@ -8,7 +8,7 @@
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**type** | [**TypeEnum**](#TypeEnum) | Path type, can either be 'common_prefix' or 'object' |
-**path** | **String** | A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. |
+**path** | **String** | A source location to import path or to a single object. Must match the lakeFS installation blockstore type. |
**destination** | **String** | Destination for the imported objects on the branch |
diff --git a/clients/java/docs/ImportPagination.md b/clients/java/docs/ImportPagination.md
deleted file mode 100644
index 42c4f46255f..00000000000
--- a/clients/java/docs/ImportPagination.md
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-# ImportPagination
-
-
-## Properties
-
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**hasMore** | **Boolean** | More keys to be ingested. |
-**continuationToken** | **String** | Opaque. Token used to import the next range. | [optional]
-**lastKey** | **String** | Last object store key that was ingested. |
-**stagingToken** | **String** | Staging token for skipped objects during ingest | [optional]
-
-
-
diff --git a/clients/java/docs/ImportStatusResp.md b/clients/java/docs/ImportStatus.md
similarity index 95%
rename from clients/java/docs/ImportStatusResp.md
rename to clients/java/docs/ImportStatus.md
index 28c4c44ad65..e184f352672 100644
--- a/clients/java/docs/ImportStatusResp.md
+++ b/clients/java/docs/ImportStatus.md
@@ -1,6 +1,6 @@
-# ImportStatusResp
+# ImportStatus
## Properties
diff --git a/clients/java/docs/IngestRangeCreationResponse.md b/clients/java/docs/IngestRangeCreationResponse.md
deleted file mode 100644
index b55853a2f01..00000000000
--- a/clients/java/docs/IngestRangeCreationResponse.md
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-# IngestRangeCreationResponse
-
-
-## Properties
-
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**range** | [**RangeMetadata**](RangeMetadata.md) | | [optional]
-**pagination** | [**ImportPagination**](ImportPagination.md) | | [optional]
-
-
-
diff --git a/clients/java/docs/InternalApi.md b/clients/java/docs/InternalApi.md
index ba761086508..9471c000d80 100644
--- a/clients/java/docs/InternalApi.md
+++ b/clients/java/docs/InternalApi.md
@@ -11,7 +11,6 @@ Method | HTTP request | Description
[**setGarbageCollectionRulesPreflight**](InternalApi.md#setGarbageCollectionRulesPreflight) | **GET** /repositories/{repository}/gc/rules/set_allowed |
[**setup**](InternalApi.md#setup) | **POST** /setup_lakefs | setup lakeFS and create a first user
[**setupCommPrefs**](InternalApi.md#setupCommPrefs) | **POST** /setup_comm_prefs | setup communications preferences
-[**updateBranchToken**](InternalApi.md#updateBranchToken) | **PUT** /repositories/{repository}/branches/{branch}/update_token | modify branch staging token
[**uploadObjectPreflight**](InternalApi.md#uploadObjectPreflight) | **GET** /repositories/{repository}/branches/{branch}/objects/stage_allowed |
@@ -525,102 +524,6 @@ No authorization required
**412** | wrong setup state for this operation | - |
**0** | Internal Server Error | - |
-
-# **updateBranchToken**
-> updateBranchToken(repository, branch, updateToken)
-
-modify branch staging token
-
-### Example
-```java
-// Import classes:
-import io.lakefs.clients.api.ApiClient;
-import io.lakefs.clients.api.ApiException;
-import io.lakefs.clients.api.Configuration;
-import io.lakefs.clients.api.auth.*;
-import io.lakefs.clients.api.models.*;
-import io.lakefs.clients.api.InternalApi;
-
-public class Example {
- public static void main(String[] args) {
- ApiClient defaultClient = Configuration.getDefaultApiClient();
- defaultClient.setBasePath("http://localhost/api/v1");
-
- // Configure HTTP basic authorization: basic_auth
- HttpBasicAuth basic_auth = (HttpBasicAuth) defaultClient.getAuthentication("basic_auth");
- basic_auth.setUsername("YOUR USERNAME");
- basic_auth.setPassword("YOUR PASSWORD");
-
- // Configure API key authorization: cookie_auth
- ApiKeyAuth cookie_auth = (ApiKeyAuth) defaultClient.getAuthentication("cookie_auth");
- cookie_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //cookie_auth.setApiKeyPrefix("Token");
-
- // Configure HTTP bearer authorization: jwt_token
- HttpBearerAuth jwt_token = (HttpBearerAuth) defaultClient.getAuthentication("jwt_token");
- jwt_token.setBearerToken("BEARER TOKEN");
-
- // Configure API key authorization: oidc_auth
- ApiKeyAuth oidc_auth = (ApiKeyAuth) defaultClient.getAuthentication("oidc_auth");
- oidc_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //oidc_auth.setApiKeyPrefix("Token");
-
- // Configure API key authorization: saml_auth
- ApiKeyAuth saml_auth = (ApiKeyAuth) defaultClient.getAuthentication("saml_auth");
- saml_auth.setApiKey("YOUR API KEY");
- // Uncomment the following line to set a prefix for the API key, e.g. "Token" (defaults to null)
- //saml_auth.setApiKeyPrefix("Token");
-
- InternalApi apiInstance = new InternalApi(defaultClient);
- String repository = "repository_example"; // String |
- String branch = "branch_example"; // String |
- UpdateToken updateToken = new UpdateToken(); // UpdateToken |
- try {
- apiInstance.updateBranchToken(repository, branch, updateToken);
- } catch (ApiException e) {
- System.err.println("Exception when calling InternalApi#updateBranchToken");
- System.err.println("Status code: " + e.getCode());
- System.err.println("Reason: " + e.getResponseBody());
- System.err.println("Response headers: " + e.getResponseHeaders());
- e.printStackTrace();
- }
- }
-}
-```
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **repository** | **String**| |
- **branch** | **String**| |
- **updateToken** | [**UpdateToken**](UpdateToken.md)| |
-
-### Return type
-
-null (empty response body)
-
-### Authorization
-
-[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth)
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-### HTTP response details
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**204** | branch updated successfully | - |
-**400** | Validation Error | - |
-**401** | Unauthorized | - |
-**403** | Forbidden | - |
-**404** | Resource Not Found | - |
-**0** | Internal Server Error | - |
-
# **uploadObjectPreflight**
> uploadObjectPreflight(repository, branch, path)
diff --git a/clients/java/docs/ObjectStats.md b/clients/java/docs/ObjectStats.md
index 9b2ccd70ebd..ed818768da7 100644
--- a/clients/java/docs/ObjectStats.md
+++ b/clients/java/docs/ObjectStats.md
@@ -10,7 +10,7 @@ Name | Type | Description | Notes
**path** | **String** | |
**pathType** | [**PathTypeEnum**](#PathTypeEnum) | |
**physicalAddress** | **String** | The location of the object on the underlying object store. Formatted as a native URI with the object store type as scheme (\"s3://...\", \"gs://...\", etc.) Or, in the case of presign=true, will be an HTTP URL to be consumed via regular HTTP GET |
-**physicalAddressExpiry** | **Long** | If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional]
+**physicalAddressExpiry** | **Long** | If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional]
**checksum** | **String** | |
**sizeBytes** | **Long** | | [optional]
**mtime** | **Long** | Unix Epoch in seconds |
diff --git a/clients/java/docs/StageRangeCreation.md b/clients/java/docs/StageRangeCreation.md
deleted file mode 100644
index 9ffe0932b0a..00000000000
--- a/clients/java/docs/StageRangeCreation.md
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-# StageRangeCreation
-
-
-## Properties
-
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**fromSourceURI** | **String** | The source location of the ingested files. Must match the lakeFS installation blockstore type. |
-**after** | **String** | Only objects after this key would be ingested. |
-**prepend** | **String** | A prefix to prepend to ingested objects. |
-**continuationToken** | **String** | Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key. | [optional]
-**stagingToken** | **String** | Opaque. Client should pass staging_token if received from server on previous request | [optional]
-
-
-
diff --git a/clients/java/docs/StagingLocation.md b/clients/java/docs/StagingLocation.md
index b28a1d95e08..aa20403bec4 100644
--- a/clients/java/docs/StagingLocation.md
+++ b/clients/java/docs/StagingLocation.md
@@ -10,8 +10,8 @@ Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**physicalAddress** | **String** | | [optional]
**token** | **String** | opaque staging token to use to link uploaded object |
-**presignedUrl** | **String** | if presign=true is passed in the request, this field will contain a presigned URL to use when uploading | [optional]
-**presignedUrlExpiry** | **Long** | If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional]
+**presignedUrl** | **String** | if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading | [optional]
+**presignedUrlExpiry** | **Long** | If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional]
diff --git a/clients/java/docs/UpdatePasswordByToken.md b/clients/java/docs/UpdatePasswordByToken.md
deleted file mode 100644
index 401c7b16338..00000000000
--- a/clients/java/docs/UpdatePasswordByToken.md
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
-# UpdatePasswordByToken
-
-
-## Properties
-
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**token** | **String** | token used for authentication |
-**newPassword** | **String** | new password to update |
-**email** | **String** | optional user email to match the token for verification | [optional]
-
-
-
diff --git a/clients/java/docs/User.md b/clients/java/docs/User.md
index 53510057b23..fe543e554df 100644
--- a/clients/java/docs/User.md
+++ b/clients/java/docs/User.md
@@ -7,10 +7,9 @@
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
-**id** | **String** | a unique identifier for the user. In password-based authentication, this is the email. |
+**id** | **String** | a unique identifier for the user. |
**creationDate** | **Long** | Unix Epoch in seconds |
**friendlyName** | **String** | | [optional]
-**email** | **String** | | [optional]
diff --git a/clients/java/docs/UserCreation.md b/clients/java/docs/UserCreation.md
index a384671ee29..12d8a5af0a7 100644
--- a/clients/java/docs/UserCreation.md
+++ b/clients/java/docs/UserCreation.md
@@ -7,7 +7,7 @@
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
-**id** | **String** | a unique identifier for the user. In password-based authentication, this is the email. |
+**id** | **String** | a unique identifier for the user. |
**inviteUser** | **Boolean** | | [optional]
diff --git a/clients/java/pom.xml b/clients/java/pom.xml
index b3038b2f53f..5d823e368e4 100644
--- a/clients/java/pom.xml
+++ b/clients/java/pom.xml
@@ -22,7 +22,7 @@
apache2
- http://www.apache.org/licenses/LICENSE-2.0.html
+ https://www.apache.org/licenses/LICENSE-2.0.html
repo
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/AuthApi.java b/clients/java/src/main/java/io/lakefs/clients/api/AuthApi.java
index 520cb2bab47..f9013f05c59 100644
--- a/clients/java/src/main/java/io/lakefs/clients/api/AuthApi.java
+++ b/clients/java/src/main/java/io/lakefs/clients/api/AuthApi.java
@@ -35,14 +35,12 @@
import io.lakefs.clients.api.model.CurrentUser;
import io.lakefs.clients.api.model.Error;
import io.lakefs.clients.api.model.ErrorNoACL;
-import io.lakefs.clients.api.model.ForgotPasswordRequest;
import io.lakefs.clients.api.model.Group;
import io.lakefs.clients.api.model.GroupCreation;
import io.lakefs.clients.api.model.GroupList;
import io.lakefs.clients.api.model.LoginInformation;
import io.lakefs.clients.api.model.Policy;
import io.lakefs.clients.api.model.PolicyList;
-import io.lakefs.clients.api.model.UpdatePasswordByToken;
import io.lakefs.clients.api.model.User;
import io.lakefs.clients.api.model.UserCreation;
import io.lakefs.clients.api.model.UserList;
@@ -1833,121 +1831,6 @@ public okhttp3.Call detachPolicyFromUserAsync(String userId, String policyId, fi
localVarApiClient.executeAsync(localVarCall, _callback);
return localVarCall;
}
- /**
- * Build call for forgotPassword
- * @param forgotPasswordRequest (required)
- * @param _callback Callback for upload/download progress
- * @return Call to execute
- * @throws ApiException If fail to serialize the request body object
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
204
No content
-
-
400
Bad Request
-
-
0
Internal Server Error
-
-
- */
- public okhttp3.Call forgotPasswordCall(ForgotPasswordRequest forgotPasswordRequest, final ApiCallback _callback) throws ApiException {
- Object localVarPostBody = forgotPasswordRequest;
-
- // create path and map variables
- String localVarPath = "/auth/password/forgot";
-
- List localVarQueryParams = new ArrayList();
- List localVarCollectionQueryParams = new ArrayList();
- Map localVarHeaderParams = new HashMap();
- Map localVarCookieParams = new HashMap();
- Map localVarFormParams = new HashMap();
-
- final String[] localVarAccepts = {
- "application/json"
- };
- final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
- if (localVarAccept != null) {
- localVarHeaderParams.put("Accept", localVarAccept);
- }
-
- final String[] localVarContentTypes = {
- "application/json"
- };
- final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
- localVarHeaderParams.put("Content-Type", localVarContentType);
-
- String[] localVarAuthNames = new String[] { };
- return localVarApiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
- }
-
- @SuppressWarnings("rawtypes")
- private okhttp3.Call forgotPasswordValidateBeforeCall(ForgotPasswordRequest forgotPasswordRequest, final ApiCallback _callback) throws ApiException {
-
- // verify the required parameter 'forgotPasswordRequest' is set
- if (forgotPasswordRequest == null) {
- throw new ApiException("Missing the required parameter 'forgotPasswordRequest' when calling forgotPassword(Async)");
- }
-
-
- okhttp3.Call localVarCall = forgotPasswordCall(forgotPasswordRequest, _callback);
- return localVarCall;
-
- }
-
- /**
- * forgot password request initiates the password reset process
- *
- * @param forgotPasswordRequest (required)
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
204
No content
-
-
400
Bad Request
-
-
0
Internal Server Error
-
-
- */
- public void forgotPassword(ForgotPasswordRequest forgotPasswordRequest) throws ApiException {
- forgotPasswordWithHttpInfo(forgotPasswordRequest);
- }
-
- /**
- * forgot password request initiates the password reset process
- *
- * @param forgotPasswordRequest (required)
- * @return ApiResponse<Void>
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
204
No content
-
-
400
Bad Request
-
-
0
Internal Server Error
-
-
- */
- public ApiResponse forgotPasswordWithHttpInfo(ForgotPasswordRequest forgotPasswordRequest) throws ApiException {
- okhttp3.Call localVarCall = forgotPasswordValidateBeforeCall(forgotPasswordRequest, null);
- return localVarApiClient.execute(localVarCall);
- }
-
- /**
- * forgot password request initiates the password reset process (asynchronously)
- *
- * @param forgotPasswordRequest (required)
- * @param _callback The callback to be executed when the API call finishes
- * @return The request call
- * @throws ApiException If fail to process the API call, e.g. serializing the request body object
- * @http.response.details
-
@@ -4069,121 +3952,6 @@ public okhttp3.Call setGroupACLAsync(String groupId, ACL ACL, final ApiCallback<
localVarApiClient.executeAsync(localVarCall, _callback);
return localVarCall;
}
- /**
- * Build call for updatePassword
- * @param updatePasswordByToken (required)
- * @param _callback Callback for upload/download progress
- * @return Call to execute
- * @throws ApiException If fail to serialize the request body object
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
successful reset
-
-
401
Unauthorized
-
-
0
Internal Server Error
-
-
- */
- public okhttp3.Call updatePasswordCall(UpdatePasswordByToken updatePasswordByToken, final ApiCallback _callback) throws ApiException {
- Object localVarPostBody = updatePasswordByToken;
-
- // create path and map variables
- String localVarPath = "/auth/password";
-
- List localVarQueryParams = new ArrayList();
- List localVarCollectionQueryParams = new ArrayList();
- Map localVarHeaderParams = new HashMap();
- Map localVarCookieParams = new HashMap();
- Map localVarFormParams = new HashMap();
-
- final String[] localVarAccepts = {
- "application/json"
- };
- final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
- if (localVarAccept != null) {
- localVarHeaderParams.put("Accept", localVarAccept);
- }
-
- final String[] localVarContentTypes = {
- "application/json"
- };
- final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
- localVarHeaderParams.put("Content-Type", localVarContentType);
-
- String[] localVarAuthNames = new String[] { "cookie_auth" };
- return localVarApiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
- }
-
- @SuppressWarnings("rawtypes")
- private okhttp3.Call updatePasswordValidateBeforeCall(UpdatePasswordByToken updatePasswordByToken, final ApiCallback _callback) throws ApiException {
-
- // verify the required parameter 'updatePasswordByToken' is set
- if (updatePasswordByToken == null) {
- throw new ApiException("Missing the required parameter 'updatePasswordByToken' when calling updatePassword(Async)");
- }
-
-
- okhttp3.Call localVarCall = updatePasswordCall(updatePasswordByToken, _callback);
- return localVarCall;
-
- }
-
- /**
- * Update user password by reset_password token
- *
- * @param updatePasswordByToken (required)
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
successful reset
-
-
401
Unauthorized
-
-
0
Internal Server Error
-
-
- */
- public void updatePassword(UpdatePasswordByToken updatePasswordByToken) throws ApiException {
- updatePasswordWithHttpInfo(updatePasswordByToken);
- }
-
- /**
- * Update user password by reset_password token
- *
- * @param updatePasswordByToken (required)
- * @return ApiResponse<Void>
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
successful reset
-
-
401
Unauthorized
-
-
0
Internal Server Error
-
-
- */
- public ApiResponse updatePasswordWithHttpInfo(UpdatePasswordByToken updatePasswordByToken) throws ApiException {
- okhttp3.Call localVarCall = updatePasswordValidateBeforeCall(updatePasswordByToken, null);
- return localVarApiClient.execute(localVarCall);
- }
-
- /**
- * Update user password by reset_password token (asynchronously)
- *
- * @param updatePasswordByToken (required)
- * @param _callback The callback to be executed when the API call finishes
- * @return The request call
- * @throws ApiException If fail to process the API call, e.g. serializing the request body object
- * @http.response.details
-
- */
- public okhttp3.Call createMetaRangeCall(String repository, MetaRangeCreation metaRangeCreation, final ApiCallback _callback) throws ApiException {
- Object localVarPostBody = metaRangeCreation;
-
- // create path and map variables
- String localVarPath = "/repositories/{repository}/branches/metaranges"
- .replaceAll("\\{" + "repository" + "\\}", localVarApiClient.escapeString(repository.toString()));
-
- List localVarQueryParams = new ArrayList();
- List localVarCollectionQueryParams = new ArrayList();
- Map localVarHeaderParams = new HashMap();
- Map localVarCookieParams = new HashMap();
- Map localVarFormParams = new HashMap();
-
- final String[] localVarAccepts = {
- "application/json"
- };
- final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
- if (localVarAccept != null) {
- localVarHeaderParams.put("Accept", localVarAccept);
- }
-
- final String[] localVarContentTypes = {
- "application/json"
- };
- final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
- localVarHeaderParams.put("Content-Type", localVarContentType);
-
- String[] localVarAuthNames = new String[] { "basic_auth", "cookie_auth", "jwt_token", "oidc_auth", "saml_auth" };
- return localVarApiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
- }
-
- @SuppressWarnings("rawtypes")
- private okhttp3.Call createMetaRangeValidateBeforeCall(String repository, MetaRangeCreation metaRangeCreation, final ApiCallback _callback) throws ApiException {
-
- // verify the required parameter 'repository' is set
- if (repository == null) {
- throw new ApiException("Missing the required parameter 'repository' when calling createMetaRange(Async)");
- }
-
- // verify the required parameter 'metaRangeCreation' is set
- if (metaRangeCreation == null) {
- throw new ApiException("Missing the required parameter 'metaRangeCreation' when calling createMetaRange(Async)");
- }
-
-
- okhttp3.Call localVarCall = createMetaRangeCall(repository, metaRangeCreation, _callback);
- return localVarCall;
-
- }
-
- /**
- * create a lakeFS metarange file from the given ranges
- *
- * @param repository (required)
- * @param metaRangeCreation (required)
- * @return MetaRangeCreationResponse
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
metarange metadata
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
403
Forbidden
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public MetaRangeCreationResponse createMetaRange(String repository, MetaRangeCreation metaRangeCreation) throws ApiException {
- ApiResponse localVarResp = createMetaRangeWithHttpInfo(repository, metaRangeCreation);
- return localVarResp.getData();
- }
-
- /**
- * create a lakeFS metarange file from the given ranges
- *
- * @param repository (required)
- * @param metaRangeCreation (required)
- * @return ApiResponse<MetaRangeCreationResponse>
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
metarange metadata
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
403
Forbidden
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public ApiResponse createMetaRangeWithHttpInfo(String repository, MetaRangeCreation metaRangeCreation) throws ApiException {
- okhttp3.Call localVarCall = createMetaRangeValidateBeforeCall(repository, metaRangeCreation, null);
- Type localVarReturnType = new TypeToken(){}.getType();
- return localVarApiClient.execute(localVarCall, localVarReturnType);
- }
-
- /**
- * create a lakeFS metarange file from the given ranges (asynchronously)
- *
- * @param repository (required)
- * @param metaRangeCreation (required)
- * @param _callback The callback to be executed when the API call finishes
- * @return The request call
- * @throws ApiException If fail to process the API call, e.g. serializing the request body object
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
metarange metadata
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
403
Forbidden
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public okhttp3.Call createMetaRangeAsync(String repository, MetaRangeCreation metaRangeCreation, final ApiCallback _callback) throws ApiException {
-
- okhttp3.Call localVarCall = createMetaRangeValidateBeforeCall(repository, metaRangeCreation, _callback);
- Type localVarReturnType = new TypeToken(){}.getType();
- localVarApiClient.executeAsync(localVarCall, localVarReturnType, _callback);
- return localVarCall;
- }
/**
* Build call for importCancel
* @param repository (required)
@@ -583,7 +438,7 @@ private okhttp3.Call importStatusValidateBeforeCall(String repository, String br
* @param repository (required)
* @param branch (required)
* @param id Unique identifier of the import process (required)
- * @return ImportStatusResp
+ * @return ImportStatus
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
* @http.response.details
*/
- public ImportStatusResp importStatus(String repository, String branch, String id) throws ApiException {
- ApiResponse localVarResp = importStatusWithHttpInfo(repository, branch, id);
+ public ImportStatus importStatus(String repository, String branch, String id) throws ApiException {
+ ApiResponse localVarResp = importStatusWithHttpInfo(repository, branch, id);
return localVarResp.getData();
}
@@ -605,7 +460,7 @@ public ImportStatusResp importStatus(String repository, String branch, String id
* @param repository (required)
* @param branch (required)
* @param id Unique identifier of the import process (required)
- * @return ApiResponse<ImportStatusResp>
+ * @return ApiResponse<ImportStatus>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
* @http.response.details
@@ -616,9 +471,9 @@ public ImportStatusResp importStatus(String repository, String branch, String id
0
Internal Server Error
-
*/
- public ApiResponse importStatusWithHttpInfo(String repository, String branch, String id) throws ApiException {
+ public ApiResponse importStatusWithHttpInfo(String repository, String branch, String id) throws ApiException {
okhttp3.Call localVarCall = importStatusValidateBeforeCall(repository, branch, id, null);
- Type localVarReturnType = new TypeToken(){}.getType();
+ Type localVarReturnType = new TypeToken(){}.getType();
return localVarApiClient.execute(localVarCall, localVarReturnType);
}
@@ -640,147 +495,10 @@ public ApiResponse importStatusWithHttpInfo(String repository,
0
Internal Server Error
-
*/
- public okhttp3.Call importStatusAsync(String repository, String branch, String id, final ApiCallback _callback) throws ApiException {
+ public okhttp3.Call importStatusAsync(String repository, String branch, String id, final ApiCallback _callback) throws ApiException {
okhttp3.Call localVarCall = importStatusValidateBeforeCall(repository, branch, id, _callback);
- Type localVarReturnType = new TypeToken(){}.getType();
- localVarApiClient.executeAsync(localVarCall, localVarReturnType, _callback);
- return localVarCall;
- }
- /**
- * Build call for ingestRange
- * @param repository (required)
- * @param stageRangeCreation (required)
- * @param _callback Callback for upload/download progress
- * @return Call to execute
- * @throws ApiException If fail to serialize the request body object
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
range metadata
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public okhttp3.Call ingestRangeCall(String repository, StageRangeCreation stageRangeCreation, final ApiCallback _callback) throws ApiException {
- Object localVarPostBody = stageRangeCreation;
-
- // create path and map variables
- String localVarPath = "/repositories/{repository}/branches/ranges"
- .replaceAll("\\{" + "repository" + "\\}", localVarApiClient.escapeString(repository.toString()));
-
- List localVarQueryParams = new ArrayList();
- List localVarCollectionQueryParams = new ArrayList();
- Map localVarHeaderParams = new HashMap();
- Map localVarCookieParams = new HashMap();
- Map localVarFormParams = new HashMap();
-
- final String[] localVarAccepts = {
- "application/json"
- };
- final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
- if (localVarAccept != null) {
- localVarHeaderParams.put("Accept", localVarAccept);
- }
-
- final String[] localVarContentTypes = {
- "application/json"
- };
- final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
- localVarHeaderParams.put("Content-Type", localVarContentType);
-
- String[] localVarAuthNames = new String[] { "basic_auth", "cookie_auth", "jwt_token", "oidc_auth", "saml_auth" };
- return localVarApiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
- }
-
- @SuppressWarnings("rawtypes")
- private okhttp3.Call ingestRangeValidateBeforeCall(String repository, StageRangeCreation stageRangeCreation, final ApiCallback _callback) throws ApiException {
-
- // verify the required parameter 'repository' is set
- if (repository == null) {
- throw new ApiException("Missing the required parameter 'repository' when calling ingestRange(Async)");
- }
-
- // verify the required parameter 'stageRangeCreation' is set
- if (stageRangeCreation == null) {
- throw new ApiException("Missing the required parameter 'stageRangeCreation' when calling ingestRange(Async)");
- }
-
-
- okhttp3.Call localVarCall = ingestRangeCall(repository, stageRangeCreation, _callback);
- return localVarCall;
-
- }
-
- /**
- * create a lakeFS range file from the source uri
- *
- * @param repository (required)
- * @param stageRangeCreation (required)
- * @return IngestRangeCreationResponse
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
range metadata
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public IngestRangeCreationResponse ingestRange(String repository, StageRangeCreation stageRangeCreation) throws ApiException {
- ApiResponse localVarResp = ingestRangeWithHttpInfo(repository, stageRangeCreation);
- return localVarResp.getData();
- }
-
- /**
- * create a lakeFS range file from the source uri
- *
- * @param repository (required)
- * @param stageRangeCreation (required)
- * @return ApiResponse<IngestRangeCreationResponse>
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
range metadata
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public ApiResponse ingestRangeWithHttpInfo(String repository, StageRangeCreation stageRangeCreation) throws ApiException {
- okhttp3.Call localVarCall = ingestRangeValidateBeforeCall(repository, stageRangeCreation, null);
- Type localVarReturnType = new TypeToken(){}.getType();
- return localVarApiClient.execute(localVarCall, localVarReturnType);
- }
-
- /**
- * create a lakeFS range file from the source uri (asynchronously)
- *
- * @param repository (required)
- * @param stageRangeCreation (required)
- * @param _callback The callback to be executed when the API call finishes
- * @return The request call
- * @throws ApiException If fail to process the API call, e.g. serializing the request body object
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
201
range metadata
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public okhttp3.Call ingestRangeAsync(String repository, StageRangeCreation stageRangeCreation, final ApiCallback _callback) throws ApiException {
-
- okhttp3.Call localVarCall = ingestRangeValidateBeforeCall(repository, stageRangeCreation, _callback);
- Type localVarReturnType = new TypeToken(){}.getType();
+ Type localVarReturnType = new TypeToken(){}.getType();
localVarApiClient.executeAsync(localVarCall, localVarReturnType, _callback);
return localVarCall;
}
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/InternalApi.java b/clients/java/src/main/java/io/lakefs/clients/api/InternalApi.java
index 1c0fc48eb98..554c1b6479a 100644
--- a/clients/java/src/main/java/io/lakefs/clients/api/InternalApi.java
+++ b/clients/java/src/main/java/io/lakefs/clients/api/InternalApi.java
@@ -34,7 +34,6 @@
import io.lakefs.clients.api.model.Setup;
import io.lakefs.clients.api.model.SetupState;
import io.lakefs.clients.api.model.StatsEventsList;
-import io.lakefs.clients.api.model.UpdateToken;
import java.lang.reflect.Type;
import java.util.ArrayList;
@@ -878,153 +877,6 @@ public okhttp3.Call setupCommPrefsAsync(CommPrefsInput commPrefsInput, final Api
localVarApiClient.executeAsync(localVarCall, _callback);
return localVarCall;
}
- /**
- * Build call for updateBranchToken
- * @param repository (required)
- * @param branch (required)
- * @param updateToken (required)
- * @param _callback Callback for upload/download progress
- * @return Call to execute
- * @throws ApiException If fail to serialize the request body object
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
204
branch updated successfully
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
403
Forbidden
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public okhttp3.Call updateBranchTokenCall(String repository, String branch, UpdateToken updateToken, final ApiCallback _callback) throws ApiException {
- Object localVarPostBody = updateToken;
-
- // create path and map variables
- String localVarPath = "/repositories/{repository}/branches/{branch}/update_token"
- .replaceAll("\\{" + "repository" + "\\}", localVarApiClient.escapeString(repository.toString()))
- .replaceAll("\\{" + "branch" + "\\}", localVarApiClient.escapeString(branch.toString()));
-
- List localVarQueryParams = new ArrayList();
- List localVarCollectionQueryParams = new ArrayList();
- Map localVarHeaderParams = new HashMap();
- Map localVarCookieParams = new HashMap();
- Map localVarFormParams = new HashMap();
-
- final String[] localVarAccepts = {
- "application/json"
- };
- final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
- if (localVarAccept != null) {
- localVarHeaderParams.put("Accept", localVarAccept);
- }
-
- final String[] localVarContentTypes = {
- "application/json"
- };
- final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
- localVarHeaderParams.put("Content-Type", localVarContentType);
-
- String[] localVarAuthNames = new String[] { "basic_auth", "cookie_auth", "jwt_token", "oidc_auth", "saml_auth" };
- return localVarApiClient.buildCall(localVarPath, "PUT", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
- }
-
- @SuppressWarnings("rawtypes")
- private okhttp3.Call updateBranchTokenValidateBeforeCall(String repository, String branch, UpdateToken updateToken, final ApiCallback _callback) throws ApiException {
-
- // verify the required parameter 'repository' is set
- if (repository == null) {
- throw new ApiException("Missing the required parameter 'repository' when calling updateBranchToken(Async)");
- }
-
- // verify the required parameter 'branch' is set
- if (branch == null) {
- throw new ApiException("Missing the required parameter 'branch' when calling updateBranchToken(Async)");
- }
-
- // verify the required parameter 'updateToken' is set
- if (updateToken == null) {
- throw new ApiException("Missing the required parameter 'updateToken' when calling updateBranchToken(Async)");
- }
-
-
- okhttp3.Call localVarCall = updateBranchTokenCall(repository, branch, updateToken, _callback);
- return localVarCall;
-
- }
-
- /**
- * modify branch staging token
- *
- * @param repository (required)
- * @param branch (required)
- * @param updateToken (required)
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
204
branch updated successfully
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
403
Forbidden
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public void updateBranchToken(String repository, String branch, UpdateToken updateToken) throws ApiException {
- updateBranchTokenWithHttpInfo(repository, branch, updateToken);
- }
-
- /**
- * modify branch staging token
- *
- * @param repository (required)
- * @param branch (required)
- * @param updateToken (required)
- * @return ApiResponse<Void>
- * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
204
branch updated successfully
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
403
Forbidden
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public ApiResponse updateBranchTokenWithHttpInfo(String repository, String branch, UpdateToken updateToken) throws ApiException {
- okhttp3.Call localVarCall = updateBranchTokenValidateBeforeCall(repository, branch, updateToken, null);
- return localVarApiClient.execute(localVarCall);
- }
-
- /**
- * modify branch staging token (asynchronously)
- *
- * @param repository (required)
- * @param branch (required)
- * @param updateToken (required)
- * @param _callback The callback to be executed when the API call finishes
- * @return The request call
- * @throws ApiException If fail to process the API call, e.g. serializing the request body object
- * @http.response.details
-
-
Status Code
Description
Response Headers
-
204
branch updated successfully
-
-
400
Validation Error
-
-
401
Unauthorized
-
-
403
Forbidden
-
-
404
Resource Not Found
-
-
0
Internal Server Error
-
-
- */
- public okhttp3.Call updateBranchTokenAsync(String repository, String branch, UpdateToken updateToken, final ApiCallback _callback) throws ApiException {
-
- okhttp3.Call localVarCall = updateBranchTokenValidateBeforeCall(repository, branch, updateToken, _callback);
- localVarApiClient.executeAsync(localVarCall, _callback);
- return localVarCall;
- }
/**
* Build call for uploadObjectPreflight
* @param repository (required)
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ForgotPasswordRequest.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ForgotPasswordRequest.java
deleted file mode 100644
index 4aecfceeb90..00000000000
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/ForgotPasswordRequest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import java.util.Objects;
-import java.util.Arrays;
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-
-/**
- * ForgotPasswordRequest
- */
-@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen")
-public class ForgotPasswordRequest {
- public static final String SERIALIZED_NAME_EMAIL = "email";
- @SerializedName(SERIALIZED_NAME_EMAIL)
- private String email;
-
-
- public ForgotPasswordRequest email(String email) {
-
- this.email = email;
- return this;
- }
-
- /**
- * Get email
- * @return email
- **/
- @javax.annotation.Nonnull
- @ApiModelProperty(required = true, value = "")
-
- public String getEmail() {
- return email;
- }
-
-
- public void setEmail(String email) {
- this.email = email;
- }
-
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- ForgotPasswordRequest forgotPasswordRequest = (ForgotPasswordRequest) o;
- return Objects.equals(this.email, forgotPasswordRequest.email);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(email);
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- sb.append("class ForgotPasswordRequest {\n");
- sb.append(" email: ").append(toIndentedString(email)).append("\n");
- sb.append("}");
- return sb.toString();
- }
-
- /**
- * Convert the given object to string with each line indented by 4 spaces
- * (except the first line).
- */
- private String toIndentedString(Object o) {
- if (o == null) {
- return "null";
- }
- return o.toString().replace("\n", "\n ");
- }
-
-}
-
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportLocation.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportLocation.java
index 6447ee9b3e0..9de81d0bbfb 100644
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportLocation.java
+++ b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportLocation.java
@@ -119,11 +119,11 @@ public ImportLocation path(String path) {
}
/**
- * A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type.
+ * A source location to import path or to a single object. Must match the lakeFS installation blockstore type.
* @return path
**/
@javax.annotation.Nonnull
- @ApiModelProperty(example = "s3://my-bucket/production/collections/", required = true, value = "A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type.")
+ @ApiModelProperty(example = "s3://my-bucket/production/collections/", required = true, value = "A source location to import path or to a single object. Must match the lakeFS installation blockstore type.")
public String getPath() {
return path;
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportPagination.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportPagination.java
deleted file mode 100644
index 22c650d81ca..00000000000
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportPagination.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import java.util.Objects;
-import java.util.Arrays;
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-
-/**
- * ImportPagination
- */
-@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen")
-public class ImportPagination {
- public static final String SERIALIZED_NAME_HAS_MORE = "has_more";
- @SerializedName(SERIALIZED_NAME_HAS_MORE)
- private Boolean hasMore;
-
- public static final String SERIALIZED_NAME_CONTINUATION_TOKEN = "continuation_token";
- @SerializedName(SERIALIZED_NAME_CONTINUATION_TOKEN)
- private String continuationToken;
-
- public static final String SERIALIZED_NAME_LAST_KEY = "last_key";
- @SerializedName(SERIALIZED_NAME_LAST_KEY)
- private String lastKey;
-
- public static final String SERIALIZED_NAME_STAGING_TOKEN = "staging_token";
- @SerializedName(SERIALIZED_NAME_STAGING_TOKEN)
- private String stagingToken;
-
-
- public ImportPagination hasMore(Boolean hasMore) {
-
- this.hasMore = hasMore;
- return this;
- }
-
- /**
- * More keys to be ingested.
- * @return hasMore
- **/
- @javax.annotation.Nonnull
- @ApiModelProperty(required = true, value = "More keys to be ingested.")
-
- public Boolean getHasMore() {
- return hasMore;
- }
-
-
- public void setHasMore(Boolean hasMore) {
- this.hasMore = hasMore;
- }
-
-
- public ImportPagination continuationToken(String continuationToken) {
-
- this.continuationToken = continuationToken;
- return this;
- }
-
- /**
- * Opaque. Token used to import the next range.
- * @return continuationToken
- **/
- @javax.annotation.Nullable
- @ApiModelProperty(value = "Opaque. Token used to import the next range.")
-
- public String getContinuationToken() {
- return continuationToken;
- }
-
-
- public void setContinuationToken(String continuationToken) {
- this.continuationToken = continuationToken;
- }
-
-
- public ImportPagination lastKey(String lastKey) {
-
- this.lastKey = lastKey;
- return this;
- }
-
- /**
- * Last object store key that was ingested.
- * @return lastKey
- **/
- @javax.annotation.Nonnull
- @ApiModelProperty(required = true, value = "Last object store key that was ingested.")
-
- public String getLastKey() {
- return lastKey;
- }
-
-
- public void setLastKey(String lastKey) {
- this.lastKey = lastKey;
- }
-
-
- public ImportPagination stagingToken(String stagingToken) {
-
- this.stagingToken = stagingToken;
- return this;
- }
-
- /**
- * Staging token for skipped objects during ingest
- * @return stagingToken
- **/
- @javax.annotation.Nullable
- @ApiModelProperty(value = "Staging token for skipped objects during ingest")
-
- public String getStagingToken() {
- return stagingToken;
- }
-
-
- public void setStagingToken(String stagingToken) {
- this.stagingToken = stagingToken;
- }
-
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- ImportPagination importPagination = (ImportPagination) o;
- return Objects.equals(this.hasMore, importPagination.hasMore) &&
- Objects.equals(this.continuationToken, importPagination.continuationToken) &&
- Objects.equals(this.lastKey, importPagination.lastKey) &&
- Objects.equals(this.stagingToken, importPagination.stagingToken);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(hasMore, continuationToken, lastKey, stagingToken);
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- sb.append("class ImportPagination {\n");
- sb.append(" hasMore: ").append(toIndentedString(hasMore)).append("\n");
- sb.append(" continuationToken: ").append(toIndentedString(continuationToken)).append("\n");
- sb.append(" lastKey: ").append(toIndentedString(lastKey)).append("\n");
- sb.append(" stagingToken: ").append(toIndentedString(stagingToken)).append("\n");
- sb.append("}");
- return sb.toString();
- }
-
- /**
- * Convert the given object to string with each line indented by 4 spaces
- * (except the first line).
- */
- private String toIndentedString(Object o) {
- if (o == null) {
- return "null";
- }
- return o.toString().replace("\n", "\n ");
- }
-
-}
-
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatusResp.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatus.java
similarity index 84%
rename from clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatusResp.java
rename to clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatus.java
index de3c94f1a30..dd10756f815 100644
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatusResp.java
+++ b/clients/java/src/main/java/io/lakefs/clients/api/model/ImportStatus.java
@@ -28,10 +28,10 @@
import org.threeten.bp.OffsetDateTime;
/**
- * ImportStatusResp
+ * ImportStatus
*/
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen")
-public class ImportStatusResp {
+public class ImportStatus {
public static final String SERIALIZED_NAME_COMPLETED = "completed";
@SerializedName(SERIALIZED_NAME_COMPLETED)
private Boolean completed;
@@ -57,7 +57,7 @@ public class ImportStatusResp {
private Error error;
- public ImportStatusResp completed(Boolean completed) {
+ public ImportStatus completed(Boolean completed) {
this.completed = completed;
return this;
@@ -80,7 +80,7 @@ public void setCompleted(Boolean completed) {
}
- public ImportStatusResp updateTime(OffsetDateTime updateTime) {
+ public ImportStatus updateTime(OffsetDateTime updateTime) {
this.updateTime = updateTime;
return this;
@@ -103,7 +103,7 @@ public void setUpdateTime(OffsetDateTime updateTime) {
}
- public ImportStatusResp ingestedObjects(Long ingestedObjects) {
+ public ImportStatus ingestedObjects(Long ingestedObjects) {
this.ingestedObjects = ingestedObjects;
return this;
@@ -126,7 +126,7 @@ public void setIngestedObjects(Long ingestedObjects) {
}
- public ImportStatusResp metarangeId(String metarangeId) {
+ public ImportStatus metarangeId(String metarangeId) {
this.metarangeId = metarangeId;
return this;
@@ -149,7 +149,7 @@ public void setMetarangeId(String metarangeId) {
}
- public ImportStatusResp commit(Commit commit) {
+ public ImportStatus commit(Commit commit) {
this.commit = commit;
return this;
@@ -172,7 +172,7 @@ public void setCommit(Commit commit) {
}
- public ImportStatusResp error(Error error) {
+ public ImportStatus error(Error error) {
this.error = error;
return this;
@@ -203,13 +203,13 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) {
return false;
}
- ImportStatusResp importStatusResp = (ImportStatusResp) o;
- return Objects.equals(this.completed, importStatusResp.completed) &&
- Objects.equals(this.updateTime, importStatusResp.updateTime) &&
- Objects.equals(this.ingestedObjects, importStatusResp.ingestedObjects) &&
- Objects.equals(this.metarangeId, importStatusResp.metarangeId) &&
- Objects.equals(this.commit, importStatusResp.commit) &&
- Objects.equals(this.error, importStatusResp.error);
+ ImportStatus importStatus = (ImportStatus) o;
+ return Objects.equals(this.completed, importStatus.completed) &&
+ Objects.equals(this.updateTime, importStatus.updateTime) &&
+ Objects.equals(this.ingestedObjects, importStatus.ingestedObjects) &&
+ Objects.equals(this.metarangeId, importStatus.metarangeId) &&
+ Objects.equals(this.commit, importStatus.commit) &&
+ Objects.equals(this.error, importStatus.error);
}
@Override
@@ -220,7 +220,7 @@ public int hashCode() {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
- sb.append("class ImportStatusResp {\n");
+ sb.append("class ImportStatus {\n");
sb.append(" completed: ").append(toIndentedString(completed)).append("\n");
sb.append(" updateTime: ").append(toIndentedString(updateTime)).append("\n");
sb.append(" ingestedObjects: ").append(toIndentedString(ingestedObjects)).append("\n");
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/IngestRangeCreationResponse.java b/clients/java/src/main/java/io/lakefs/clients/api/model/IngestRangeCreationResponse.java
deleted file mode 100644
index eef58e3a41c..00000000000
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/IngestRangeCreationResponse.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import java.util.Objects;
-import java.util.Arrays;
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.lakefs.clients.api.model.ImportPagination;
-import io.lakefs.clients.api.model.RangeMetadata;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-
-/**
- * IngestRangeCreationResponse
- */
-@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen")
-public class IngestRangeCreationResponse {
- public static final String SERIALIZED_NAME_RANGE = "range";
- @SerializedName(SERIALIZED_NAME_RANGE)
- private RangeMetadata range;
-
- public static final String SERIALIZED_NAME_PAGINATION = "pagination";
- @SerializedName(SERIALIZED_NAME_PAGINATION)
- private ImportPagination pagination;
-
-
- public IngestRangeCreationResponse range(RangeMetadata range) {
-
- this.range = range;
- return this;
- }
-
- /**
- * Get range
- * @return range
- **/
- @javax.annotation.Nullable
- @ApiModelProperty(value = "")
-
- public RangeMetadata getRange() {
- return range;
- }
-
-
- public void setRange(RangeMetadata range) {
- this.range = range;
- }
-
-
- public IngestRangeCreationResponse pagination(ImportPagination pagination) {
-
- this.pagination = pagination;
- return this;
- }
-
- /**
- * Get pagination
- * @return pagination
- **/
- @javax.annotation.Nullable
- @ApiModelProperty(value = "")
-
- public ImportPagination getPagination() {
- return pagination;
- }
-
-
- public void setPagination(ImportPagination pagination) {
- this.pagination = pagination;
- }
-
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- IngestRangeCreationResponse ingestRangeCreationResponse = (IngestRangeCreationResponse) o;
- return Objects.equals(this.range, ingestRangeCreationResponse.range) &&
- Objects.equals(this.pagination, ingestRangeCreationResponse.pagination);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(range, pagination);
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- sb.append("class IngestRangeCreationResponse {\n");
- sb.append(" range: ").append(toIndentedString(range)).append("\n");
- sb.append(" pagination: ").append(toIndentedString(pagination)).append("\n");
- sb.append("}");
- return sb.toString();
- }
-
- /**
- * Convert the given object to string with each line indented by 4 spaces
- * (except the first line).
- */
- private String toIndentedString(Object o) {
- if (o == null) {
- return "null";
- }
- return o.toString().replace("\n", "\n ");
- }
-
-}
-
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/ObjectStats.java b/clients/java/src/main/java/io/lakefs/clients/api/model/ObjectStats.java
index 7e2fb43f455..8b4b132b51d 100644
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/ObjectStats.java
+++ b/clients/java/src/main/java/io/lakefs/clients/api/model/ObjectStats.java
@@ -192,11 +192,11 @@ public ObjectStats physicalAddressExpiry(Long physicalAddressExpiry) {
}
/**
- * If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*.
+ * If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*.
* @return physicalAddressExpiry
**/
@javax.annotation.Nullable
- @ApiModelProperty(value = "If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. ")
+ @ApiModelProperty(value = "If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. ")
public Long getPhysicalAddressExpiry() {
return physicalAddressExpiry;
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/StageRangeCreation.java b/clients/java/src/main/java/io/lakefs/clients/api/model/StageRangeCreation.java
deleted file mode 100644
index 2e52ec0ceeb..00000000000
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/StageRangeCreation.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import java.util.Objects;
-import java.util.Arrays;
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-
-/**
- * StageRangeCreation
- */
-@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen")
-public class StageRangeCreation {
- public static final String SERIALIZED_NAME_FROM_SOURCE_U_R_I = "fromSourceURI";
- @SerializedName(SERIALIZED_NAME_FROM_SOURCE_U_R_I)
- private String fromSourceURI;
-
- public static final String SERIALIZED_NAME_AFTER = "after";
- @SerializedName(SERIALIZED_NAME_AFTER)
- private String after;
-
- public static final String SERIALIZED_NAME_PREPEND = "prepend";
- @SerializedName(SERIALIZED_NAME_PREPEND)
- private String prepend;
-
- public static final String SERIALIZED_NAME_CONTINUATION_TOKEN = "continuation_token";
- @SerializedName(SERIALIZED_NAME_CONTINUATION_TOKEN)
- private String continuationToken;
-
- public static final String SERIALIZED_NAME_STAGING_TOKEN = "staging_token";
- @SerializedName(SERIALIZED_NAME_STAGING_TOKEN)
- private String stagingToken;
-
-
- public StageRangeCreation fromSourceURI(String fromSourceURI) {
-
- this.fromSourceURI = fromSourceURI;
- return this;
- }
-
- /**
- * The source location of the ingested files. Must match the lakeFS installation blockstore type.
- * @return fromSourceURI
- **/
- @javax.annotation.Nonnull
- @ApiModelProperty(example = "s3://my-bucket/production/collections/", required = true, value = "The source location of the ingested files. Must match the lakeFS installation blockstore type.")
-
- public String getFromSourceURI() {
- return fromSourceURI;
- }
-
-
- public void setFromSourceURI(String fromSourceURI) {
- this.fromSourceURI = fromSourceURI;
- }
-
-
- public StageRangeCreation after(String after) {
-
- this.after = after;
- return this;
- }
-
- /**
- * Only objects after this key would be ingested.
- * @return after
- **/
- @javax.annotation.Nonnull
- @ApiModelProperty(example = "production/collections/some/file.parquet", required = true, value = "Only objects after this key would be ingested.")
-
- public String getAfter() {
- return after;
- }
-
-
- public void setAfter(String after) {
- this.after = after;
- }
-
-
- public StageRangeCreation prepend(String prepend) {
-
- this.prepend = prepend;
- return this;
- }
-
- /**
- * A prefix to prepend to ingested objects.
- * @return prepend
- **/
- @javax.annotation.Nonnull
- @ApiModelProperty(example = "collections/", required = true, value = "A prefix to prepend to ingested objects.")
-
- public String getPrepend() {
- return prepend;
- }
-
-
- public void setPrepend(String prepend) {
- this.prepend = prepend;
- }
-
-
- public StageRangeCreation continuationToken(String continuationToken) {
-
- this.continuationToken = continuationToken;
- return this;
- }
-
- /**
- * Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key.
- * @return continuationToken
- **/
- @javax.annotation.Nullable
- @ApiModelProperty(value = "Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key.")
-
- public String getContinuationToken() {
- return continuationToken;
- }
-
-
- public void setContinuationToken(String continuationToken) {
- this.continuationToken = continuationToken;
- }
-
-
- public StageRangeCreation stagingToken(String stagingToken) {
-
- this.stagingToken = stagingToken;
- return this;
- }
-
- /**
- * Opaque. Client should pass staging_token if received from server on previous request
- * @return stagingToken
- **/
- @javax.annotation.Nullable
- @ApiModelProperty(value = "Opaque. Client should pass staging_token if received from server on previous request")
-
- public String getStagingToken() {
- return stagingToken;
- }
-
-
- public void setStagingToken(String stagingToken) {
- this.stagingToken = stagingToken;
- }
-
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- StageRangeCreation stageRangeCreation = (StageRangeCreation) o;
- return Objects.equals(this.fromSourceURI, stageRangeCreation.fromSourceURI) &&
- Objects.equals(this.after, stageRangeCreation.after) &&
- Objects.equals(this.prepend, stageRangeCreation.prepend) &&
- Objects.equals(this.continuationToken, stageRangeCreation.continuationToken) &&
- Objects.equals(this.stagingToken, stageRangeCreation.stagingToken);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(fromSourceURI, after, prepend, continuationToken, stagingToken);
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- sb.append("class StageRangeCreation {\n");
- sb.append(" fromSourceURI: ").append(toIndentedString(fromSourceURI)).append("\n");
- sb.append(" after: ").append(toIndentedString(after)).append("\n");
- sb.append(" prepend: ").append(toIndentedString(prepend)).append("\n");
- sb.append(" continuationToken: ").append(toIndentedString(continuationToken)).append("\n");
- sb.append(" stagingToken: ").append(toIndentedString(stagingToken)).append("\n");
- sb.append("}");
- return sb.toString();
- }
-
- /**
- * Convert the given object to string with each line indented by 4 spaces
- * (except the first line).
- */
- private String toIndentedString(Object o) {
- if (o == null) {
- return "null";
- }
- return o.toString().replace("\n", "\n ");
- }
-
-}
-
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/StagingLocation.java b/clients/java/src/main/java/io/lakefs/clients/api/model/StagingLocation.java
index e2a513ca1ba..1d379ae7541 100644
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/StagingLocation.java
+++ b/clients/java/src/main/java/io/lakefs/clients/api/model/StagingLocation.java
@@ -101,11 +101,11 @@ public StagingLocation presignedUrl(String presignedUrl) {
}
/**
- * if presign=true is passed in the request, this field will contain a presigned URL to use when uploading
+ * if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading
* @return presignedUrl
**/
@javax.annotation.Nullable
- @ApiModelProperty(value = "if presign=true is passed in the request, this field will contain a presigned URL to use when uploading")
+ @ApiModelProperty(value = "if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading")
public String getPresignedUrl() {
return presignedUrl;
@@ -124,11 +124,11 @@ public StagingLocation presignedUrlExpiry(Long presignedUrlExpiry) {
}
/**
- * If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*.
+ * If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*.
* @return presignedUrlExpiry
**/
@javax.annotation.Nullable
- @ApiModelProperty(value = "If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. ")
+ @ApiModelProperty(value = "If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. ")
public Long getPresignedUrlExpiry() {
return presignedUrlExpiry;
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/UpdatePasswordByToken.java b/clients/java/src/main/java/io/lakefs/clients/api/model/UpdatePasswordByToken.java
deleted file mode 100644
index 41bf91425fb..00000000000
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/UpdatePasswordByToken.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import java.util.Objects;
-import java.util.Arrays;
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-
-/**
- * UpdatePasswordByToken
- */
-@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen")
-public class UpdatePasswordByToken {
- public static final String SERIALIZED_NAME_TOKEN = "token";
- @SerializedName(SERIALIZED_NAME_TOKEN)
- private String token;
-
- public static final String SERIALIZED_NAME_NEW_PASSWORD = "newPassword";
- @SerializedName(SERIALIZED_NAME_NEW_PASSWORD)
- private String newPassword;
-
- public static final String SERIALIZED_NAME_EMAIL = "email";
- @SerializedName(SERIALIZED_NAME_EMAIL)
- private String email;
-
-
- public UpdatePasswordByToken token(String token) {
-
- this.token = token;
- return this;
- }
-
- /**
- * token used for authentication
- * @return token
- **/
- @javax.annotation.Nonnull
- @ApiModelProperty(required = true, value = "token used for authentication")
-
- public String getToken() {
- return token;
- }
-
-
- public void setToken(String token) {
- this.token = token;
- }
-
-
- public UpdatePasswordByToken newPassword(String newPassword) {
-
- this.newPassword = newPassword;
- return this;
- }
-
- /**
- * new password to update
- * @return newPassword
- **/
- @javax.annotation.Nonnull
- @ApiModelProperty(required = true, value = "new password to update")
-
- public String getNewPassword() {
- return newPassword;
- }
-
-
- public void setNewPassword(String newPassword) {
- this.newPassword = newPassword;
- }
-
-
- public UpdatePasswordByToken email(String email) {
-
- this.email = email;
- return this;
- }
-
- /**
- * optional user email to match the token for verification
- * @return email
- **/
- @javax.annotation.Nullable
- @ApiModelProperty(value = "optional user email to match the token for verification")
-
- public String getEmail() {
- return email;
- }
-
-
- public void setEmail(String email) {
- this.email = email;
- }
-
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- UpdatePasswordByToken updatePasswordByToken = (UpdatePasswordByToken) o;
- return Objects.equals(this.token, updatePasswordByToken.token) &&
- Objects.equals(this.newPassword, updatePasswordByToken.newPassword) &&
- Objects.equals(this.email, updatePasswordByToken.email);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(token, newPassword, email);
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- sb.append("class UpdatePasswordByToken {\n");
- sb.append(" token: ").append(toIndentedString(token)).append("\n");
- sb.append(" newPassword: ").append(toIndentedString(newPassword)).append("\n");
- sb.append(" email: ").append(toIndentedString(email)).append("\n");
- sb.append("}");
- return sb.toString();
- }
-
- /**
- * Convert the given object to string with each line indented by 4 spaces
- * (except the first line).
- */
- private String toIndentedString(Object o) {
- if (o == null) {
- return "null";
- }
- return o.toString().replace("\n", "\n ");
- }
-
-}
-
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/User.java b/clients/java/src/main/java/io/lakefs/clients/api/model/User.java
index 0655e621207..24379fc6410 100644
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/User.java
+++ b/clients/java/src/main/java/io/lakefs/clients/api/model/User.java
@@ -41,10 +41,6 @@ public class User {
@SerializedName(SERIALIZED_NAME_FRIENDLY_NAME)
private String friendlyName;
- public static final String SERIALIZED_NAME_EMAIL = "email";
- @SerializedName(SERIALIZED_NAME_EMAIL)
- private String email;
-
public User id(String id) {
@@ -53,11 +49,11 @@ public User id(String id) {
}
/**
- * a unique identifier for the user. In password-based authentication, this is the email.
+ * a unique identifier for the user.
* @return id
**/
@javax.annotation.Nonnull
- @ApiModelProperty(required = true, value = "a unique identifier for the user. In password-based authentication, this is the email.")
+ @ApiModelProperty(required = true, value = "a unique identifier for the user.")
public String getId() {
return id;
@@ -115,29 +111,6 @@ public void setFriendlyName(String friendlyName) {
}
- public User email(String email) {
-
- this.email = email;
- return this;
- }
-
- /**
- * Get email
- * @return email
- **/
- @javax.annotation.Nullable
- @ApiModelProperty(value = "")
-
- public String getEmail() {
- return email;
- }
-
-
- public void setEmail(String email) {
- this.email = email;
- }
-
-
@Override
public boolean equals(Object o) {
if (this == o) {
@@ -149,13 +122,12 @@ public boolean equals(Object o) {
User user = (User) o;
return Objects.equals(this.id, user.id) &&
Objects.equals(this.creationDate, user.creationDate) &&
- Objects.equals(this.friendlyName, user.friendlyName) &&
- Objects.equals(this.email, user.email);
+ Objects.equals(this.friendlyName, user.friendlyName);
}
@Override
public int hashCode() {
- return Objects.hash(id, creationDate, friendlyName, email);
+ return Objects.hash(id, creationDate, friendlyName);
}
@Override
@@ -165,7 +137,6 @@ public String toString() {
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" creationDate: ").append(toIndentedString(creationDate)).append("\n");
sb.append(" friendlyName: ").append(toIndentedString(friendlyName)).append("\n");
- sb.append(" email: ").append(toIndentedString(email)).append("\n");
sb.append("}");
return sb.toString();
}
diff --git a/clients/java/src/main/java/io/lakefs/clients/api/model/UserCreation.java b/clients/java/src/main/java/io/lakefs/clients/api/model/UserCreation.java
index f3d7a9eb482..7af62d415c8 100644
--- a/clients/java/src/main/java/io/lakefs/clients/api/model/UserCreation.java
+++ b/clients/java/src/main/java/io/lakefs/clients/api/model/UserCreation.java
@@ -45,11 +45,11 @@ public UserCreation id(String id) {
}
/**
- * a unique identifier for the user. In password-based authentication, this is the email.
+ * a unique identifier for the user.
* @return id
**/
@javax.annotation.Nonnull
- @ApiModelProperty(required = true, value = "a unique identifier for the user. In password-based authentication, this is the email.")
+ @ApiModelProperty(required = true, value = "a unique identifier for the user.")
public String getId() {
return id;
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/AuthApiTest.java b/clients/java/src/test/java/io/lakefs/clients/api/AuthApiTest.java
index 83ba7f1aff3..d1450bf29d8 100644
--- a/clients/java/src/test/java/io/lakefs/clients/api/AuthApiTest.java
+++ b/clients/java/src/test/java/io/lakefs/clients/api/AuthApiTest.java
@@ -22,14 +22,12 @@
import io.lakefs.clients.api.model.CurrentUser;
import io.lakefs.clients.api.model.Error;
import io.lakefs.clients.api.model.ErrorNoACL;
-import io.lakefs.clients.api.model.ForgotPasswordRequest;
import io.lakefs.clients.api.model.Group;
import io.lakefs.clients.api.model.GroupCreation;
import io.lakefs.clients.api.model.GroupList;
import io.lakefs.clients.api.model.LoginInformation;
import io.lakefs.clients.api.model.Policy;
import io.lakefs.clients.api.model.PolicyList;
-import io.lakefs.clients.api.model.UpdatePasswordByToken;
import io.lakefs.clients.api.model.User;
import io.lakefs.clients.api.model.UserCreation;
import io.lakefs.clients.api.model.UserList;
@@ -267,21 +265,6 @@ public void detachPolicyFromUserTest() throws ApiException {
// TODO: test validations
}
- /**
- * forgot password request initiates the password reset process
- *
- *
- *
- * @throws ApiException
- * if the Api call fails
- */
- @Test
- public void forgotPasswordTest() throws ApiException {
- ForgotPasswordRequest forgotPasswordRequest = null;
- api.forgotPassword(forgotPasswordRequest);
- // TODO: test validations
- }
-
/**
* get credentials
*
@@ -545,21 +528,6 @@ public void setGroupACLTest() throws ApiException {
// TODO: test validations
}
- /**
- * Update user password by reset_password token
- *
- *
- *
- * @throws ApiException
- * if the Api call fails
- */
- @Test
- public void updatePasswordTest() throws ApiException {
- UpdatePasswordByToken updatePasswordByToken = null;
- api.updatePassword(updatePasswordByToken);
- // TODO: test validations
- }
-
/**
* update policy
*
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/ImportApiTest.java b/clients/java/src/test/java/io/lakefs/clients/api/ImportApiTest.java
index a820297b909..a25cd6bade0 100644
--- a/clients/java/src/test/java/io/lakefs/clients/api/ImportApiTest.java
+++ b/clients/java/src/test/java/io/lakefs/clients/api/ImportApiTest.java
@@ -17,11 +17,7 @@
import io.lakefs.clients.api.model.Error;
import io.lakefs.clients.api.model.ImportCreation;
import io.lakefs.clients.api.model.ImportCreationResponse;
-import io.lakefs.clients.api.model.ImportStatusResp;
-import io.lakefs.clients.api.model.IngestRangeCreationResponse;
-import io.lakefs.clients.api.model.MetaRangeCreation;
-import io.lakefs.clients.api.model.MetaRangeCreationResponse;
-import io.lakefs.clients.api.model.StageRangeCreation;
+import io.lakefs.clients.api.model.ImportStatus;
import org.junit.Test;
import org.junit.Ignore;
@@ -39,22 +35,6 @@ public class ImportApiTest {
private final ImportApi api = new ImportApi();
- /**
- * create a lakeFS metarange file from the given ranges
- *
- *
- *
- * @throws ApiException
- * if the Api call fails
- */
- @Test
- public void createMetaRangeTest() throws ApiException {
- String repository = null;
- MetaRangeCreation metaRangeCreation = null;
- MetaRangeCreationResponse response = api.createMetaRange(repository, metaRangeCreation);
- // TODO: test validations
- }
-
/**
* cancel ongoing import
*
@@ -102,23 +82,7 @@ public void importStatusTest() throws ApiException {
String repository = null;
String branch = null;
String id = null;
- ImportStatusResp response = api.importStatus(repository, branch, id);
- // TODO: test validations
- }
-
- /**
- * create a lakeFS range file from the source uri
- *
- *
- *
- * @throws ApiException
- * if the Api call fails
- */
- @Test
- public void ingestRangeTest() throws ApiException {
- String repository = null;
- StageRangeCreation stageRangeCreation = null;
- IngestRangeCreationResponse response = api.ingestRange(repository, stageRangeCreation);
+ ImportStatus response = api.importStatus(repository, branch, id);
// TODO: test validations
}
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/InternalApiTest.java b/clients/java/src/test/java/io/lakefs/clients/api/InternalApiTest.java
index d27a16781ec..44dff9228e2 100644
--- a/clients/java/src/test/java/io/lakefs/clients/api/InternalApiTest.java
+++ b/clients/java/src/test/java/io/lakefs/clients/api/InternalApiTest.java
@@ -21,7 +21,6 @@
import io.lakefs.clients.api.model.Setup;
import io.lakefs.clients.api.model.SetupState;
import io.lakefs.clients.api.model.StatsEventsList;
-import io.lakefs.clients.api.model.UpdateToken;
import org.junit.Test;
import org.junit.Ignore;
@@ -142,23 +141,6 @@ public void setupCommPrefsTest() throws ApiException {
// TODO: test validations
}
- /**
- * modify branch staging token
- *
- *
- *
- * @throws ApiException
- * if the Api call fails
- */
- @Test
- public void updateBranchTokenTest() throws ApiException {
- String repository = null;
- String branch = null;
- UpdateToken updateToken = null;
- api.updateBranchToken(repository, branch, updateToken);
- // TODO: test validations
- }
-
/**
*
*
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/ForgotPasswordRequestTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/ForgotPasswordRequestTest.java
deleted file mode 100644
index b0628f03581..00000000000
--- a/clients/java/src/test/java/io/lakefs/clients/api/model/ForgotPasswordRequestTest.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-
-
-/**
- * Model tests for ForgotPasswordRequest
- */
-public class ForgotPasswordRequestTest {
- private final ForgotPasswordRequest model = new ForgotPasswordRequest();
-
- /**
- * Model tests for ForgotPasswordRequest
- */
- @Test
- public void testForgotPasswordRequest() {
- // TODO: test ForgotPasswordRequest
- }
-
- /**
- * Test the property 'email'
- */
- @Test
- public void emailTest() {
- // TODO: test email
- }
-
-}
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/ImportPaginationTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/ImportPaginationTest.java
deleted file mode 100644
index 11bc94b7281..00000000000
--- a/clients/java/src/test/java/io/lakefs/clients/api/model/ImportPaginationTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-
-
-/**
- * Model tests for ImportPagination
- */
-public class ImportPaginationTest {
- private final ImportPagination model = new ImportPagination();
-
- /**
- * Model tests for ImportPagination
- */
- @Test
- public void testImportPagination() {
- // TODO: test ImportPagination
- }
-
- /**
- * Test the property 'hasMore'
- */
- @Test
- public void hasMoreTest() {
- // TODO: test hasMore
- }
-
- /**
- * Test the property 'continuationToken'
- */
- @Test
- public void continuationTokenTest() {
- // TODO: test continuationToken
- }
-
- /**
- * Test the property 'lastKey'
- */
- @Test
- public void lastKeyTest() {
- // TODO: test lastKey
- }
-
- /**
- * Test the property 'stagingToken'
- */
- @Test
- public void stagingTokenTest() {
- // TODO: test stagingToken
- }
-
-}
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusRespTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusTest.java
similarity index 86%
rename from clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusRespTest.java
rename to clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusTest.java
index 5c72f284475..5ba7f313f91 100644
--- a/clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusRespTest.java
+++ b/clients/java/src/test/java/io/lakefs/clients/api/model/ImportStatusTest.java
@@ -30,17 +30,17 @@
/**
- * Model tests for ImportStatusResp
+ * Model tests for ImportStatus
*/
-public class ImportStatusRespTest {
- private final ImportStatusResp model = new ImportStatusResp();
+public class ImportStatusTest {
+ private final ImportStatus model = new ImportStatus();
/**
- * Model tests for ImportStatusResp
+ * Model tests for ImportStatus
*/
@Test
- public void testImportStatusResp() {
- // TODO: test ImportStatusResp
+ public void testImportStatus() {
+ // TODO: test ImportStatus
}
/**
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/IngestRangeCreationResponseTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/IngestRangeCreationResponseTest.java
deleted file mode 100644
index 175a9cb5d24..00000000000
--- a/clients/java/src/test/java/io/lakefs/clients/api/model/IngestRangeCreationResponseTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.lakefs.clients.api.model.ImportPagination;
-import io.lakefs.clients.api.model.RangeMetadata;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-
-
-/**
- * Model tests for IngestRangeCreationResponse
- */
-public class IngestRangeCreationResponseTest {
- private final IngestRangeCreationResponse model = new IngestRangeCreationResponse();
-
- /**
- * Model tests for IngestRangeCreationResponse
- */
- @Test
- public void testIngestRangeCreationResponse() {
- // TODO: test IngestRangeCreationResponse
- }
-
- /**
- * Test the property 'range'
- */
- @Test
- public void rangeTest() {
- // TODO: test range
- }
-
- /**
- * Test the property 'pagination'
- */
- @Test
- public void paginationTest() {
- // TODO: test pagination
- }
-
-}
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/StageRangeCreationTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/StageRangeCreationTest.java
deleted file mode 100644
index adad6b988af..00000000000
--- a/clients/java/src/test/java/io/lakefs/clients/api/model/StageRangeCreationTest.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-
-
-/**
- * Model tests for StageRangeCreation
- */
-public class StageRangeCreationTest {
- private final StageRangeCreation model = new StageRangeCreation();
-
- /**
- * Model tests for StageRangeCreation
- */
- @Test
- public void testStageRangeCreation() {
- // TODO: test StageRangeCreation
- }
-
- /**
- * Test the property 'fromSourceURI'
- */
- @Test
- public void fromSourceURITest() {
- // TODO: test fromSourceURI
- }
-
- /**
- * Test the property 'after'
- */
- @Test
- public void afterTest() {
- // TODO: test after
- }
-
- /**
- * Test the property 'prepend'
- */
- @Test
- public void prependTest() {
- // TODO: test prepend
- }
-
- /**
- * Test the property 'continuationToken'
- */
- @Test
- public void continuationTokenTest() {
- // TODO: test continuationToken
- }
-
- /**
- * Test the property 'stagingToken'
- */
- @Test
- public void stagingTokenTest() {
- // TODO: test stagingToken
- }
-
-}
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/UpdatePasswordByTokenTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/UpdatePasswordByTokenTest.java
deleted file mode 100644
index d4537f35c5c..00000000000
--- a/clients/java/src/test/java/io/lakefs/clients/api/model/UpdatePasswordByTokenTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * lakeFS API
- * lakeFS HTTP API
- *
- * The version of the OpenAPI document: 0.1.0
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-package io.lakefs.clients.api.model;
-
-import com.google.gson.TypeAdapter;
-import com.google.gson.annotations.JsonAdapter;
-import com.google.gson.annotations.SerializedName;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
-import io.swagger.annotations.ApiModel;
-import io.swagger.annotations.ApiModelProperty;
-import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-
-
-/**
- * Model tests for UpdatePasswordByToken
- */
-public class UpdatePasswordByTokenTest {
- private final UpdatePasswordByToken model = new UpdatePasswordByToken();
-
- /**
- * Model tests for UpdatePasswordByToken
- */
- @Test
- public void testUpdatePasswordByToken() {
- // TODO: test UpdatePasswordByToken
- }
-
- /**
- * Test the property 'token'
- */
- @Test
- public void tokenTest() {
- // TODO: test token
- }
-
- /**
- * Test the property 'newPassword'
- */
- @Test
- public void newPasswordTest() {
- // TODO: test newPassword
- }
-
- /**
- * Test the property 'email'
- */
- @Test
- public void emailTest() {
- // TODO: test email
- }
-
-}
diff --git a/clients/java/src/test/java/io/lakefs/clients/api/model/UserTest.java b/clients/java/src/test/java/io/lakefs/clients/api/model/UserTest.java
index 9ca002a8bb6..d347ee8a5b6 100644
--- a/clients/java/src/test/java/io/lakefs/clients/api/model/UserTest.java
+++ b/clients/java/src/test/java/io/lakefs/clients/api/model/UserTest.java
@@ -64,12 +64,4 @@ public void friendlyNameTest() {
// TODO: test friendlyName
}
- /**
- * Test the property 'email'
- */
- @Test
- public void emailTest() {
- // TODO: test email
- }
-
}
diff --git a/clients/python/.openapi-generator/FILES b/clients/python/.openapi-generator/FILES
index e77e0047b14..44f38171c71 100644
--- a/clients/python/.openapi-generator/FILES
+++ b/clients/python/.openapi-generator/FILES
@@ -30,7 +30,6 @@ docs/Error.md
docs/ErrorNoACL.md
docs/ExperimentalApi.md
docs/FindMergeBaseResult.md
-docs/ForgotPasswordRequest.md
docs/GarbageCollectionConfig.md
docs/GarbageCollectionPrepareRequest.md
docs/GarbageCollectionPrepareResponse.md
@@ -46,9 +45,7 @@ docs/ImportApi.md
docs/ImportCreation.md
docs/ImportCreationResponse.md
docs/ImportLocation.md
-docs/ImportPagination.md
-docs/ImportStatusResp.md
-docs/IngestRangeCreationResponse.md
+docs/ImportStatus.md
docs/InlineObject.md
docs/InlineObject1.md
docs/InternalApi.md
@@ -91,7 +88,6 @@ docs/RetentionApi.md
docs/RevertCreation.md
docs/Setup.md
docs/SetupState.md
-docs/StageRangeCreation.md
docs/StagingApi.md
docs/StagingLocation.md
docs/StagingMetadata.md
@@ -103,7 +99,6 @@ docs/StorageURI.md
docs/TagCreation.md
docs/TagsApi.md
docs/UnderlyingObjectProperties.md
-docs/UpdatePasswordByToken.md
docs/UpdateToken.md
docs/User.md
docs/UserCreation.md
@@ -156,7 +151,6 @@ lakefs_client/model/diff_properties.py
lakefs_client/model/error.py
lakefs_client/model/error_no_acl.py
lakefs_client/model/find_merge_base_result.py
-lakefs_client/model/forgot_password_request.py
lakefs_client/model/garbage_collection_config.py
lakefs_client/model/garbage_collection_prepare_request.py
lakefs_client/model/garbage_collection_prepare_response.py
@@ -170,9 +164,7 @@ lakefs_client/model/hook_run_list.py
lakefs_client/model/import_creation.py
lakefs_client/model/import_creation_response.py
lakefs_client/model/import_location.py
-lakefs_client/model/import_pagination.py
-lakefs_client/model/import_status_resp.py
-lakefs_client/model/ingest_range_creation_response.py
+lakefs_client/model/import_status.py
lakefs_client/model/inline_object.py
lakefs_client/model/inline_object1.py
lakefs_client/model/login_config.py
@@ -209,7 +201,6 @@ lakefs_client/model/reset_creation.py
lakefs_client/model/revert_creation.py
lakefs_client/model/setup.py
lakefs_client/model/setup_state.py
-lakefs_client/model/stage_range_creation.py
lakefs_client/model/staging_location.py
lakefs_client/model/staging_metadata.py
lakefs_client/model/statement.py
@@ -219,7 +210,6 @@ lakefs_client/model/storage_config.py
lakefs_client/model/storage_uri.py
lakefs_client/model/tag_creation.py
lakefs_client/model/underlying_object_properties.py
-lakefs_client/model/update_password_by_token.py
lakefs_client/model/update_token.py
lakefs_client/model/user.py
lakefs_client/model/user_creation.py
@@ -262,7 +252,6 @@ test/test_error.py
test/test_error_no_acl.py
test/test_experimental_api.py
test/test_find_merge_base_result.py
-test/test_forgot_password_request.py
test/test_garbage_collection_config.py
test/test_garbage_collection_prepare_request.py
test/test_garbage_collection_prepare_response.py
@@ -278,9 +267,7 @@ test/test_import_api.py
test/test_import_creation.py
test/test_import_creation_response.py
test/test_import_location.py
-test/test_import_pagination.py
-test/test_import_status_resp.py
-test/test_ingest_range_creation_response.py
+test/test_import_status.py
test/test_inline_object.py
test/test_inline_object1.py
test/test_internal_api.py
@@ -323,7 +310,6 @@ test/test_retention_api.py
test/test_revert_creation.py
test/test_setup.py
test/test_setup_state.py
-test/test_stage_range_creation.py
test/test_staging_api.py
test/test_staging_location.py
test/test_staging_metadata.py
@@ -335,7 +321,6 @@ test/test_storage_uri.py
test/test_tag_creation.py
test/test_tags_api.py
test/test_underlying_object_properties.py
-test/test_update_password_by_token.py
test/test_update_token.py
test/test_user.py
test/test_user_creation.py
diff --git a/clients/python/README.md b/clients/python/README.md
index d40a5a0406d..f3f42893be4 100644
--- a/clients/python/README.md
+++ b/clients/python/README.md
@@ -134,7 +134,6 @@ Class | Method | HTTP request | Description
*AuthApi* | [**delete_user**](docs/AuthApi.md#delete_user) | **DELETE** /auth/users/{userId} | delete user
*AuthApi* | [**detach_policy_from_group**](docs/AuthApi.md#detach_policy_from_group) | **DELETE** /auth/groups/{groupId}/policies/{policyId} | detach policy from group
*AuthApi* | [**detach_policy_from_user**](docs/AuthApi.md#detach_policy_from_user) | **DELETE** /auth/users/{userId}/policies/{policyId} | detach policy from user
-*AuthApi* | [**forgot_password**](docs/AuthApi.md#forgot_password) | **POST** /auth/password/forgot | forgot password request initiates the password reset process
*AuthApi* | [**get_credentials**](docs/AuthApi.md#get_credentials) | **GET** /auth/users/{userId}/credentials/{accessKeyId} | get credentials
*AuthApi* | [**get_current_user**](docs/AuthApi.md#get_current_user) | **GET** /user | get current user
*AuthApi* | [**get_group**](docs/AuthApi.md#get_group) | **GET** /auth/groups/{groupId} | get group
@@ -151,7 +150,6 @@ Class | Method | HTTP request | Description
*AuthApi* | [**list_users**](docs/AuthApi.md#list_users) | **GET** /auth/users | list users
*AuthApi* | [**login**](docs/AuthApi.md#login) | **POST** /auth/login | perform a login
*AuthApi* | [**set_group_acl**](docs/AuthApi.md#set_group_acl) | **POST** /auth/groups/{groupId}/acl | set ACL of group
-*AuthApi* | [**update_password**](docs/AuthApi.md#update_password) | **POST** /auth/password | Update user password by reset_password token
*AuthApi* | [**update_policy**](docs/AuthApi.md#update_policy) | **PUT** /auth/policies/{policyId} | update policy
*BranchesApi* | [**cherry_pick**](docs/BranchesApi.md#cherry_pick) | **POST** /repositories/{repository}/branches/{branch}/cherry-pick | Replay the changes from the given commit on the branch
*BranchesApi* | [**create_branch**](docs/BranchesApi.md#create_branch) | **POST** /repositories/{repository}/branches | create branch
@@ -169,11 +167,9 @@ Class | Method | HTTP request | Description
*ExperimentalApi* | [**get_otf_diffs**](docs/ExperimentalApi.md#get_otf_diffs) | **GET** /otf/diffs | get the available Open Table Format diffs
*ExperimentalApi* | [**otf_diff**](docs/ExperimentalApi.md#otf_diff) | **GET** /repositories/{repository}/otf/refs/{left_ref}/diff/{right_ref} | perform otf diff
*HealthCheckApi* | [**health_check**](docs/HealthCheckApi.md#health_check) | **GET** /healthcheck |
-*ImportApi* | [**create_meta_range**](docs/ImportApi.md#create_meta_range) | **POST** /repositories/{repository}/branches/metaranges | create a lakeFS metarange file from the given ranges
*ImportApi* | [**import_cancel**](docs/ImportApi.md#import_cancel) | **DELETE** /repositories/{repository}/branches/{branch}/import | cancel ongoing import
*ImportApi* | [**import_start**](docs/ImportApi.md#import_start) | **POST** /repositories/{repository}/branches/{branch}/import | import data from object store
*ImportApi* | [**import_status**](docs/ImportApi.md#import_status) | **GET** /repositories/{repository}/branches/{branch}/import | get import status
-*ImportApi* | [**ingest_range**](docs/ImportApi.md#ingest_range) | **POST** /repositories/{repository}/branches/ranges | create a lakeFS range file from the source uri
*InternalApi* | [**create_branch_protection_rule_preflight**](docs/InternalApi.md#create_branch_protection_rule_preflight) | **GET** /repositories/{repository}/branch_protection/set_allowed |
*InternalApi* | [**get_auth_capabilities**](docs/InternalApi.md#get_auth_capabilities) | **GET** /auth/capabilities | list authentication capabilities supported
*InternalApi* | [**get_setup_state**](docs/InternalApi.md#get_setup_state) | **GET** /setup_lakefs | check if the lakeFS installation is already set up
@@ -181,7 +177,6 @@ Class | Method | HTTP request | Description
*InternalApi* | [**set_garbage_collection_rules_preflight**](docs/InternalApi.md#set_garbage_collection_rules_preflight) | **GET** /repositories/{repository}/gc/rules/set_allowed |
*InternalApi* | [**setup**](docs/InternalApi.md#setup) | **POST** /setup_lakefs | setup lakeFS and create a first user
*InternalApi* | [**setup_comm_prefs**](docs/InternalApi.md#setup_comm_prefs) | **POST** /setup_comm_prefs | setup communications preferences
-*InternalApi* | [**update_branch_token**](docs/InternalApi.md#update_branch_token) | **PUT** /repositories/{repository}/branches/{branch}/update_token | modify branch staging token
*InternalApi* | [**upload_object_preflight**](docs/InternalApi.md#upload_object_preflight) | **GET** /repositories/{repository}/branches/{branch}/objects/stage_allowed |
*MetadataApi* | [**create_symlink_file**](docs/MetadataApi.md#create_symlink_file) | **POST** /repositories/{repository}/refs/{branch}/symlink | creates symlink files corresponding to the given directory
*MetadataApi* | [**get_meta_range**](docs/MetadataApi.md#get_meta_range) | **GET** /repositories/{repository}/metadata/meta_range/{meta_range} | return URI to a meta-range file
@@ -248,7 +243,6 @@ Class | Method | HTTP request | Description
- [Error](docs/Error.md)
- [ErrorNoACL](docs/ErrorNoACL.md)
- [FindMergeBaseResult](docs/FindMergeBaseResult.md)
- - [ForgotPasswordRequest](docs/ForgotPasswordRequest.md)
- [GarbageCollectionConfig](docs/GarbageCollectionConfig.md)
- [GarbageCollectionPrepareRequest](docs/GarbageCollectionPrepareRequest.md)
- [GarbageCollectionPrepareResponse](docs/GarbageCollectionPrepareResponse.md)
@@ -262,9 +256,7 @@ Class | Method | HTTP request | Description
- [ImportCreation](docs/ImportCreation.md)
- [ImportCreationResponse](docs/ImportCreationResponse.md)
- [ImportLocation](docs/ImportLocation.md)
- - [ImportPagination](docs/ImportPagination.md)
- - [ImportStatusResp](docs/ImportStatusResp.md)
- - [IngestRangeCreationResponse](docs/IngestRangeCreationResponse.md)
+ - [ImportStatus](docs/ImportStatus.md)
- [InlineObject](docs/InlineObject.md)
- [InlineObject1](docs/InlineObject1.md)
- [LoginConfig](docs/LoginConfig.md)
@@ -301,7 +293,6 @@ Class | Method | HTTP request | Description
- [RevertCreation](docs/RevertCreation.md)
- [Setup](docs/Setup.md)
- [SetupState](docs/SetupState.md)
- - [StageRangeCreation](docs/StageRangeCreation.md)
- [StagingLocation](docs/StagingLocation.md)
- [StagingMetadata](docs/StagingMetadata.md)
- [Statement](docs/Statement.md)
@@ -311,7 +302,6 @@ Class | Method | HTTP request | Description
- [StorageURI](docs/StorageURI.md)
- [TagCreation](docs/TagCreation.md)
- [UnderlyingObjectProperties](docs/UnderlyingObjectProperties.md)
- - [UpdatePasswordByToken](docs/UpdatePasswordByToken.md)
- [UpdateToken](docs/UpdateToken.md)
- [User](docs/User.md)
- [UserCreation](docs/UserCreation.md)
diff --git a/clients/python/docs/AuthApi.md b/clients/python/docs/AuthApi.md
index 4f83baa0045..713d08444f3 100644
--- a/clients/python/docs/AuthApi.md
+++ b/clients/python/docs/AuthApi.md
@@ -18,7 +18,6 @@ Method | HTTP request | Description
[**delete_user**](AuthApi.md#delete_user) | **DELETE** /auth/users/{userId} | delete user
[**detach_policy_from_group**](AuthApi.md#detach_policy_from_group) | **DELETE** /auth/groups/{groupId}/policies/{policyId} | detach policy from group
[**detach_policy_from_user**](AuthApi.md#detach_policy_from_user) | **DELETE** /auth/users/{userId}/policies/{policyId} | detach policy from user
-[**forgot_password**](AuthApi.md#forgot_password) | **POST** /auth/password/forgot | forgot password request initiates the password reset process
[**get_credentials**](AuthApi.md#get_credentials) | **GET** /auth/users/{userId}/credentials/{accessKeyId} | get credentials
[**get_current_user**](AuthApi.md#get_current_user) | **GET** /user | get current user
[**get_group**](AuthApi.md#get_group) | **GET** /auth/groups/{groupId} | get group
@@ -35,7 +34,6 @@ Method | HTTP request | Description
[**list_users**](AuthApi.md#list_users) | **GET** /auth/users | list users
[**login**](AuthApi.md#login) | **POST** /auth/login | perform a login
[**set_group_acl**](AuthApi.md#set_group_acl) | **POST** /auth/groups/{groupId}/acl | set ACL of group
-[**update_password**](AuthApi.md#update_password) | **POST** /auth/password | Update user password by reset_password token
[**update_policy**](AuthApi.md#update_policy) | **PUT** /auth/policies/{policyId} | update policy
@@ -1554,75 +1552,6 @@ void (empty response body)
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-# **forgot_password**
-> forgot_password(forgot_password_request)
-
-forgot password request initiates the password reset process
-
-### Example
-
-
-```python
-import time
-import lakefs_client
-from lakefs_client.api import auth_api
-from lakefs_client.model.forgot_password_request import ForgotPasswordRequest
-from lakefs_client.model.error import Error
-from pprint import pprint
-# Defining the host is optional and defaults to http://localhost/api/v1
-# See configuration.py for a list of all supported configuration parameters.
-configuration = lakefs_client.Configuration(
- host = "http://localhost/api/v1"
-)
-
-
-# Enter a context with an instance of the API client
-with lakefs_client.ApiClient() as api_client:
- # Create an instance of the API class
- api_instance = auth_api.AuthApi(api_client)
- forgot_password_request = ForgotPasswordRequest(
- email="email_example",
- ) # ForgotPasswordRequest |
-
- # example passing only required values which don't have defaults set
- try:
- # forgot password request initiates the password reset process
- api_instance.forgot_password(forgot_password_request)
- except lakefs_client.ApiException as e:
- print("Exception when calling AuthApi->forgot_password: %s\n" % e)
-```
-
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **forgot_password_request** | [**ForgotPasswordRequest**](ForgotPasswordRequest.md)| |
-
-### Return type
-
-void (empty response body)
-
-### Authorization
-
-No authorization required
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-
-### HTTP response details
-
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**204** | No content | - |
-**400** | Bad Request | - |
-**0** | Internal Server Error | - |
-
-[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-
# **get_credentials**
> Credentials get_credentials(user_id, access_key_id)
@@ -2375,7 +2304,7 @@ Name | Type | Description | Notes
| Status code | Description | Response headers |
|-------------|-------------|------------------|
-**200** | group memeber list | - |
+**200** | group member list | - |
**401** | Unauthorized | - |
**0** | Internal Server Error | - |
@@ -3387,88 +3316,6 @@ void (empty response body)
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-# **update_password**
-> update_password(update_password_by_token)
-
-Update user password by reset_password token
-
-### Example
-
-* Api Key Authentication (cookie_auth):
-
-```python
-import time
-import lakefs_client
-from lakefs_client.api import auth_api
-from lakefs_client.model.update_password_by_token import UpdatePasswordByToken
-from lakefs_client.model.error import Error
-from pprint import pprint
-# Defining the host is optional and defaults to http://localhost/api/v1
-# See configuration.py for a list of all supported configuration parameters.
-configuration = lakefs_client.Configuration(
- host = "http://localhost/api/v1"
-)
-
-# The client must configure the authentication and authorization parameters
-# in accordance with the API server security policy.
-# Examples for each auth method are provided below, use the example that
-# satisfies your auth use case.
-
-# Configure API key authorization: cookie_auth
-configuration.api_key['cookie_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['cookie_auth'] = 'Bearer'
-
-# Enter a context with an instance of the API client
-with lakefs_client.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = auth_api.AuthApi(api_client)
- update_password_by_token = UpdatePasswordByToken(
- token="token_example",
- new_password="new_password_example",
- email="email_example",
- ) # UpdatePasswordByToken |
-
- # example passing only required values which don't have defaults set
- try:
- # Update user password by reset_password token
- api_instance.update_password(update_password_by_token)
- except lakefs_client.ApiException as e:
- print("Exception when calling AuthApi->update_password: %s\n" % e)
-```
-
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **update_password_by_token** | [**UpdatePasswordByToken**](UpdatePasswordByToken.md)| |
-
-### Return type
-
-void (empty response body)
-
-### Authorization
-
-[cookie_auth](../README.md#cookie_auth)
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-
-### HTTP response details
-
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**201** | successful reset | - |
-**401** | Unauthorized | - |
-**0** | Internal Server Error | - |
-
-[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-
# **update_policy**
> Policy update_policy(policy_id, policy)
diff --git a/clients/python/docs/ForgotPasswordRequest.md b/clients/python/docs/ForgotPasswordRequest.md
deleted file mode 100644
index ab61e973795..00000000000
--- a/clients/python/docs/ForgotPasswordRequest.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# ForgotPasswordRequest
-
-
-## Properties
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**email** | **str** | |
-**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
-
-
diff --git a/clients/python/docs/ImportApi.md b/clients/python/docs/ImportApi.md
index 2816366428b..a14c88227af 100644
--- a/clients/python/docs/ImportApi.md
+++ b/clients/python/docs/ImportApi.md
@@ -4,135 +4,11 @@ All URIs are relative to *http://localhost/api/v1*
Method | HTTP request | Description
------------- | ------------- | -------------
-[**create_meta_range**](ImportApi.md#create_meta_range) | **POST** /repositories/{repository}/branches/metaranges | create a lakeFS metarange file from the given ranges
[**import_cancel**](ImportApi.md#import_cancel) | **DELETE** /repositories/{repository}/branches/{branch}/import | cancel ongoing import
[**import_start**](ImportApi.md#import_start) | **POST** /repositories/{repository}/branches/{branch}/import | import data from object store
[**import_status**](ImportApi.md#import_status) | **GET** /repositories/{repository}/branches/{branch}/import | get import status
-[**ingest_range**](ImportApi.md#ingest_range) | **POST** /repositories/{repository}/branches/ranges | create a lakeFS range file from the source uri
-# **create_meta_range**
-> MetaRangeCreationResponse create_meta_range(repository, meta_range_creation)
-
-create a lakeFS metarange file from the given ranges
-
-### Example
-
-* Basic Authentication (basic_auth):
-* Api Key Authentication (cookie_auth):
-* Bearer (JWT) Authentication (jwt_token):
-* Api Key Authentication (oidc_auth):
-* Api Key Authentication (saml_auth):
-
-```python
-import time
-import lakefs_client
-from lakefs_client.api import import_api
-from lakefs_client.model.meta_range_creation import MetaRangeCreation
-from lakefs_client.model.meta_range_creation_response import MetaRangeCreationResponse
-from lakefs_client.model.error import Error
-from pprint import pprint
-# Defining the host is optional and defaults to http://localhost/api/v1
-# See configuration.py for a list of all supported configuration parameters.
-configuration = lakefs_client.Configuration(
- host = "http://localhost/api/v1"
-)
-
-# The client must configure the authentication and authorization parameters
-# in accordance with the API server security policy.
-# Examples for each auth method are provided below, use the example that
-# satisfies your auth use case.
-
-# Configure HTTP basic authorization: basic_auth
-configuration = lakefs_client.Configuration(
- username = 'YOUR_USERNAME',
- password = 'YOUR_PASSWORD'
-)
-
-# Configure API key authorization: cookie_auth
-configuration.api_key['cookie_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['cookie_auth'] = 'Bearer'
-
-# Configure Bearer authorization (JWT): jwt_token
-configuration = lakefs_client.Configuration(
- access_token = 'YOUR_BEARER_TOKEN'
-)
-
-# Configure API key authorization: oidc_auth
-configuration.api_key['oidc_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['oidc_auth'] = 'Bearer'
-
-# Configure API key authorization: saml_auth
-configuration.api_key['saml_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['saml_auth'] = 'Bearer'
-
-# Enter a context with an instance of the API client
-with lakefs_client.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = import_api.ImportApi(api_client)
- repository = "repository_example" # str |
- meta_range_creation = MetaRangeCreation(
- ranges=[
- RangeMetadata(
- id="480e19972a6fbe98ab8e81ae5efdfd1a29037587e91244e87abd4adefffdb01c",
- min_key="production/collections/some/file_1.parquet",
- max_key="production/collections/some/file_8229.parquet",
- count=1,
- estimated_size=1,
- ),
- ],
- ) # MetaRangeCreation |
-
- # example passing only required values which don't have defaults set
- try:
- # create a lakeFS metarange file from the given ranges
- api_response = api_instance.create_meta_range(repository, meta_range_creation)
- pprint(api_response)
- except lakefs_client.ApiException as e:
- print("Exception when calling ImportApi->create_meta_range: %s\n" % e)
-```
-
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **repository** | **str**| |
- **meta_range_creation** | [**MetaRangeCreation**](MetaRangeCreation.md)| |
-
-### Return type
-
-[**MetaRangeCreationResponse**](MetaRangeCreationResponse.md)
-
-### Authorization
-
-[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth)
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-
-### HTTP response details
-
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**201** | metarange metadata | - |
-**400** | Validation Error | - |
-**401** | Unauthorized | - |
-**403** | Forbidden | - |
-**404** | Resource Not Found | - |
-**0** | Internal Server Error | - |
-
-[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-
# **import_cancel**
> import_cancel(repository, branch, id)
@@ -373,7 +249,7 @@ Name | Type | Description | Notes
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
# **import_status**
-> ImportStatusResp import_status(repository, branch, id)
+> ImportStatus import_status(repository, branch, id)
get import status
@@ -389,7 +265,7 @@ get import status
import time
import lakefs_client
from lakefs_client.api import import_api
-from lakefs_client.model.import_status_resp import ImportStatusResp
+from lakefs_client.model.import_status import ImportStatus
from lakefs_client.model.error import Error
from pprint import pprint
# Defining the host is optional and defaults to http://localhost/api/v1
@@ -460,7 +336,7 @@ Name | Type | Description | Notes
### Return type
-[**ImportStatusResp**](ImportStatusResp.md)
+[**ImportStatus**](ImportStatus.md)
### Authorization
@@ -483,120 +359,3 @@ Name | Type | Description | Notes
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-# **ingest_range**
-> IngestRangeCreationResponse ingest_range(repository, stage_range_creation)
-
-create a lakeFS range file from the source uri
-
-### Example
-
-* Basic Authentication (basic_auth):
-* Api Key Authentication (cookie_auth):
-* Bearer (JWT) Authentication (jwt_token):
-* Api Key Authentication (oidc_auth):
-* Api Key Authentication (saml_auth):
-
-```python
-import time
-import lakefs_client
-from lakefs_client.api import import_api
-from lakefs_client.model.ingest_range_creation_response import IngestRangeCreationResponse
-from lakefs_client.model.stage_range_creation import StageRangeCreation
-from lakefs_client.model.error import Error
-from pprint import pprint
-# Defining the host is optional and defaults to http://localhost/api/v1
-# See configuration.py for a list of all supported configuration parameters.
-configuration = lakefs_client.Configuration(
- host = "http://localhost/api/v1"
-)
-
-# The client must configure the authentication and authorization parameters
-# in accordance with the API server security policy.
-# Examples for each auth method are provided below, use the example that
-# satisfies your auth use case.
-
-# Configure HTTP basic authorization: basic_auth
-configuration = lakefs_client.Configuration(
- username = 'YOUR_USERNAME',
- password = 'YOUR_PASSWORD'
-)
-
-# Configure API key authorization: cookie_auth
-configuration.api_key['cookie_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['cookie_auth'] = 'Bearer'
-
-# Configure Bearer authorization (JWT): jwt_token
-configuration = lakefs_client.Configuration(
- access_token = 'YOUR_BEARER_TOKEN'
-)
-
-# Configure API key authorization: oidc_auth
-configuration.api_key['oidc_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['oidc_auth'] = 'Bearer'
-
-# Configure API key authorization: saml_auth
-configuration.api_key['saml_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['saml_auth'] = 'Bearer'
-
-# Enter a context with an instance of the API client
-with lakefs_client.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = import_api.ImportApi(api_client)
- repository = "repository_example" # str |
- stage_range_creation = StageRangeCreation(
- from_source_uri="s3://my-bucket/production/collections/",
- after="production/collections/some/file.parquet",
- prepend="collections/",
- continuation_token="continuation_token_example",
- staging_token="staging_token_example",
- ) # StageRangeCreation |
-
- # example passing only required values which don't have defaults set
- try:
- # create a lakeFS range file from the source uri
- api_response = api_instance.ingest_range(repository, stage_range_creation)
- pprint(api_response)
- except lakefs_client.ApiException as e:
- print("Exception when calling ImportApi->ingest_range: %s\n" % e)
-```
-
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **repository** | **str**| |
- **stage_range_creation** | [**StageRangeCreation**](StageRangeCreation.md)| |
-
-### Return type
-
-[**IngestRangeCreationResponse**](IngestRangeCreationResponse.md)
-
-### Authorization
-
-[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth)
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-
-### HTTP response details
-
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**201** | range metadata | - |
-**400** | Validation Error | - |
-**401** | Unauthorized | - |
-**404** | Resource Not Found | - |
-**0** | Internal Server Error | - |
-
-[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-
diff --git a/clients/python/docs/ImportLocation.md b/clients/python/docs/ImportLocation.md
index 4825cd041ac..96b789a5955 100644
--- a/clients/python/docs/ImportLocation.md
+++ b/clients/python/docs/ImportLocation.md
@@ -5,7 +5,7 @@
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**type** | **str** | Path type, can either be 'common_prefix' or 'object' |
-**path** | **str** | A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type. |
+**path** | **str** | A source location to import path or to a single object. Must match the lakeFS installation blockstore type. |
**destination** | **str** | Destination for the imported objects on the branch |
**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
diff --git a/clients/python/docs/ImportPagination.md b/clients/python/docs/ImportPagination.md
deleted file mode 100644
index 9522ca6abaa..00000000000
--- a/clients/python/docs/ImportPagination.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# ImportPagination
-
-
-## Properties
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**has_more** | **bool** | More keys to be ingested. |
-**last_key** | **str** | Last object store key that was ingested. |
-**continuation_token** | **str** | Opaque. Token used to import the next range. | [optional]
-**staging_token** | **str** | Staging token for skipped objects during ingest | [optional]
-**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
-
-
diff --git a/clients/python/docs/ImportStatusResp.md b/clients/python/docs/ImportStatus.md
similarity index 97%
rename from clients/python/docs/ImportStatusResp.md
rename to clients/python/docs/ImportStatus.md
index 2679fefd2b3..6d4daaa15e2 100644
--- a/clients/python/docs/ImportStatusResp.md
+++ b/clients/python/docs/ImportStatus.md
@@ -1,4 +1,4 @@
-# ImportStatusResp
+# ImportStatus
## Properties
diff --git a/clients/python/docs/IngestRangeCreationResponse.md b/clients/python/docs/IngestRangeCreationResponse.md
deleted file mode 100644
index 55637f92b4e..00000000000
--- a/clients/python/docs/IngestRangeCreationResponse.md
+++ /dev/null
@@ -1,13 +0,0 @@
-# IngestRangeCreationResponse
-
-
-## Properties
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**range** | [**RangeMetadata**](RangeMetadata.md) | | [optional]
-**pagination** | [**ImportPagination**](ImportPagination.md) | | [optional]
-**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
-
-
diff --git a/clients/python/docs/InternalApi.md b/clients/python/docs/InternalApi.md
index 42fe236887a..fd40783290c 100644
--- a/clients/python/docs/InternalApi.md
+++ b/clients/python/docs/InternalApi.md
@@ -11,7 +11,6 @@ Method | HTTP request | Description
[**set_garbage_collection_rules_preflight**](InternalApi.md#set_garbage_collection_rules_preflight) | **GET** /repositories/{repository}/gc/rules/set_allowed |
[**setup**](InternalApi.md#setup) | **POST** /setup_lakefs | setup lakeFS and create a first user
[**setup_comm_prefs**](InternalApi.md#setup_comm_prefs) | **POST** /setup_comm_prefs | setup communications preferences
-[**update_branch_token**](InternalApi.md#update_branch_token) | **PUT** /repositories/{repository}/branches/{branch}/update_token | modify branch staging token
[**upload_object_preflight**](InternalApi.md#upload_object_preflight) | **GET** /repositories/{repository}/branches/{branch}/objects/stage_allowed |
@@ -612,120 +611,6 @@ No authorization required
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-# **update_branch_token**
-> update_branch_token(repository, branch, update_token)
-
-modify branch staging token
-
-### Example
-
-* Basic Authentication (basic_auth):
-* Api Key Authentication (cookie_auth):
-* Bearer (JWT) Authentication (jwt_token):
-* Api Key Authentication (oidc_auth):
-* Api Key Authentication (saml_auth):
-
-```python
-import time
-import lakefs_client
-from lakefs_client.api import internal_api
-from lakefs_client.model.update_token import UpdateToken
-from lakefs_client.model.error import Error
-from pprint import pprint
-# Defining the host is optional and defaults to http://localhost/api/v1
-# See configuration.py for a list of all supported configuration parameters.
-configuration = lakefs_client.Configuration(
- host = "http://localhost/api/v1"
-)
-
-# The client must configure the authentication and authorization parameters
-# in accordance with the API server security policy.
-# Examples for each auth method are provided below, use the example that
-# satisfies your auth use case.
-
-# Configure HTTP basic authorization: basic_auth
-configuration = lakefs_client.Configuration(
- username = 'YOUR_USERNAME',
- password = 'YOUR_PASSWORD'
-)
-
-# Configure API key authorization: cookie_auth
-configuration.api_key['cookie_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['cookie_auth'] = 'Bearer'
-
-# Configure Bearer authorization (JWT): jwt_token
-configuration = lakefs_client.Configuration(
- access_token = 'YOUR_BEARER_TOKEN'
-)
-
-# Configure API key authorization: oidc_auth
-configuration.api_key['oidc_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['oidc_auth'] = 'Bearer'
-
-# Configure API key authorization: saml_auth
-configuration.api_key['saml_auth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['saml_auth'] = 'Bearer'
-
-# Enter a context with an instance of the API client
-with lakefs_client.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = internal_api.InternalApi(api_client)
- repository = "repository_example" # str |
- branch = "branch_example" # str |
- update_token = UpdateToken(
- staging_token="staging_token_example",
- ) # UpdateToken |
-
- # example passing only required values which don't have defaults set
- try:
- # modify branch staging token
- api_instance.update_branch_token(repository, branch, update_token)
- except lakefs_client.ApiException as e:
- print("Exception when calling InternalApi->update_branch_token: %s\n" % e)
-```
-
-
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **repository** | **str**| |
- **branch** | **str**| |
- **update_token** | [**UpdateToken**](UpdateToken.md)| |
-
-### Return type
-
-void (empty response body)
-
-### Authorization
-
-[basic_auth](../README.md#basic_auth), [cookie_auth](../README.md#cookie_auth), [jwt_token](../README.md#jwt_token), [oidc_auth](../README.md#oidc_auth), [saml_auth](../README.md#saml_auth)
-
-### HTTP request headers
-
- - **Content-Type**: application/json
- - **Accept**: application/json
-
-
-### HTTP response details
-
-| Status code | Description | Response headers |
-|-------------|-------------|------------------|
-**204** | branch updated successfully | - |
-**400** | Validation Error | - |
-**401** | Unauthorized | - |
-**403** | Forbidden | - |
-**404** | Resource Not Found | - |
-**0** | Internal Server Error | - |
-
-[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-
# **upload_object_preflight**
> upload_object_preflight(repository, branch, path)
diff --git a/clients/python/docs/ObjectStats.md b/clients/python/docs/ObjectStats.md
index 23733d89b21..d5be770ca89 100644
--- a/clients/python/docs/ObjectStats.md
+++ b/clients/python/docs/ObjectStats.md
@@ -9,7 +9,7 @@ Name | Type | Description | Notes
**physical_address** | **str** | The location of the object on the underlying object store. Formatted as a native URI with the object store type as scheme (\"s3://...\", \"gs://...\", etc.) Or, in the case of presign=true, will be an HTTP URL to be consumed via regular HTTP GET |
**checksum** | **str** | |
**mtime** | **int** | Unix Epoch in seconds |
-**physical_address_expiry** | **int** | If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional]
+**physical_address_expiry** | **int** | If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional]
**size_bytes** | **int** | | [optional]
**metadata** | [**ObjectUserMetadata**](ObjectUserMetadata.md) | | [optional]
**content_type** | **str** | Object media type | [optional]
diff --git a/clients/python/docs/StageRangeCreation.md b/clients/python/docs/StageRangeCreation.md
deleted file mode 100644
index c0b4173d6f4..00000000000
--- a/clients/python/docs/StageRangeCreation.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# StageRangeCreation
-
-
-## Properties
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**from_source_uri** | **str** | The source location of the ingested files. Must match the lakeFS installation blockstore type. |
-**after** | **str** | Only objects after this key would be ingested. |
-**prepend** | **str** | A prefix to prepend to ingested objects. |
-**continuation_token** | **str** | Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key. | [optional]
-**staging_token** | **str** | Opaque. Client should pass staging_token if received from server on previous request | [optional]
-**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
-
-
diff --git a/clients/python/docs/StagingLocation.md b/clients/python/docs/StagingLocation.md
index 35e4712f55d..1e6dcdf1190 100644
--- a/clients/python/docs/StagingLocation.md
+++ b/clients/python/docs/StagingLocation.md
@@ -7,8 +7,8 @@ Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**token** | **str** | opaque staging token to use to link uploaded object |
**physical_address** | **str** | | [optional]
-**presigned_url** | **str, none_type** | if presign=true is passed in the request, this field will contain a presigned URL to use when uploading | [optional]
-**presigned_url_expiry** | **int** | If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional]
+**presigned_url** | **str, none_type** | if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading | [optional]
+**presigned_url_expiry** | **int** | If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. | [optional]
**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
diff --git a/clients/python/docs/UpdatePasswordByToken.md b/clients/python/docs/UpdatePasswordByToken.md
deleted file mode 100644
index 7a7ffebee6f..00000000000
--- a/clients/python/docs/UpdatePasswordByToken.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# UpdatePasswordByToken
-
-
-## Properties
-Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
-**token** | **str** | token used for authentication |
-**new_password** | **str** | new password to update |
-**email** | **str** | optional user email to match the token for verification | [optional]
-**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
-
-
diff --git a/clients/python/docs/User.md b/clients/python/docs/User.md
index fd8ec228439..dd570f69395 100644
--- a/clients/python/docs/User.md
+++ b/clients/python/docs/User.md
@@ -4,10 +4,9 @@
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
-**id** | **str** | a unique identifier for the user. In password-based authentication, this is the email. |
+**id** | **str** | a unique identifier for the user. |
**creation_date** | **int** | Unix Epoch in seconds |
**friendly_name** | **str** | | [optional]
-**email** | **str** | | [optional]
**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
diff --git a/clients/python/docs/UserCreation.md b/clients/python/docs/UserCreation.md
index a0dca37f28d..c3a70a16228 100644
--- a/clients/python/docs/UserCreation.md
+++ b/clients/python/docs/UserCreation.md
@@ -4,7 +4,7 @@
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
-**id** | **str** | a unique identifier for the user. In password-based authentication, this is the email. |
+**id** | **str** | a unique identifier for the user. |
**invite_user** | **bool** | | [optional]
**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]
diff --git a/clients/python/lakefs_client/api/auth_api.py b/clients/python/lakefs_client/api/auth_api.py
index e69508ba528..7ce6c4bfc84 100644
--- a/clients/python/lakefs_client/api/auth_api.py
+++ b/clients/python/lakefs_client/api/auth_api.py
@@ -30,14 +30,12 @@
from lakefs_client.model.current_user import CurrentUser
from lakefs_client.model.error import Error
from lakefs_client.model.error_no_acl import ErrorNoACL
-from lakefs_client.model.forgot_password_request import ForgotPasswordRequest
from lakefs_client.model.group import Group
from lakefs_client.model.group_creation import GroupCreation
from lakefs_client.model.group_list import GroupList
from lakefs_client.model.login_information import LoginInformation
from lakefs_client.model.policy import Policy
from lakefs_client.model.policy_list import PolicyList
-from lakefs_client.model.update_password_by_token import UpdatePasswordByToken
from lakefs_client.model.user import User
from lakefs_client.model.user_creation import UserCreation
from lakefs_client.model.user_list import UserList
@@ -865,56 +863,6 @@ def __init__(self, api_client=None):
},
api_client=api_client
)
- self.forgot_password_endpoint = _Endpoint(
- settings={
- 'response_type': None,
- 'auth': [],
- 'endpoint_path': '/auth/password/forgot',
- 'operation_id': 'forgot_password',
- 'http_method': 'POST',
- 'servers': None,
- },
- params_map={
- 'all': [
- 'forgot_password_request',
- ],
- 'required': [
- 'forgot_password_request',
- ],
- 'nullable': [
- ],
- 'enum': [
- ],
- 'validation': [
- ]
- },
- root_map={
- 'validations': {
- },
- 'allowed_values': {
- },
- 'openapi_types': {
- 'forgot_password_request':
- (ForgotPasswordRequest,),
- },
- 'attribute_map': {
- },
- 'location_map': {
- 'forgot_password_request': 'body',
- },
- 'collection_format_map': {
- }
- },
- headers_map={
- 'accept': [
- 'application/json'
- ],
- 'content_type': [
- 'application/json'
- ]
- },
- api_client=api_client
- )
self.get_credentials_endpoint = _Endpoint(
settings={
'response_type': (Credentials,),
@@ -1946,58 +1894,6 @@ def __init__(self, api_client=None):
},
api_client=api_client
)
- self.update_password_endpoint = _Endpoint(
- settings={
- 'response_type': None,
- 'auth': [
- 'cookie_auth'
- ],
- 'endpoint_path': '/auth/password',
- 'operation_id': 'update_password',
- 'http_method': 'POST',
- 'servers': None,
- },
- params_map={
- 'all': [
- 'update_password_by_token',
- ],
- 'required': [
- 'update_password_by_token',
- ],
- 'nullable': [
- ],
- 'enum': [
- ],
- 'validation': [
- ]
- },
- root_map={
- 'validations': {
- },
- 'allowed_values': {
- },
- 'openapi_types': {
- 'update_password_by_token':
- (UpdatePasswordByToken,),
- },
- 'attribute_map': {
- },
- 'location_map': {
- 'update_password_by_token': 'body',
- },
- 'collection_format_map': {
- }
- },
- headers_map={
- 'accept': [
- 'application/json'
- ],
- 'content_type': [
- 'application/json'
- ]
- },
- api_client=api_client
- )
self.update_policy_endpoint = _Endpoint(
settings={
'response_type': (Policy,),
@@ -2991,71 +2887,6 @@ def detach_policy_from_user(
policy_id
return self.detach_policy_from_user_endpoint.call_with_http_info(**kwargs)
- def forgot_password(
- self,
- forgot_password_request,
- **kwargs
- ):
- """forgot password request initiates the password reset process # noqa: E501
-
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
-
- >>> thread = api.forgot_password(forgot_password_request, async_req=True)
- >>> result = thread.get()
-
- Args:
- forgot_password_request (ForgotPasswordRequest):
-
- Keyword Args:
- _return_http_data_only (bool): response data without head status
- code and headers. Default is True.
- _preload_content (bool): if False, the urllib3.HTTPResponse object
- will be returned without reading/decoding response data.
- Default is True.
- _request_timeout (int/float/tuple): timeout setting for this request. If
- one number provided, it will be total request timeout. It can also
- be a pair (tuple) of (connection, read) timeouts.
- Default is None.
- _check_input_type (bool): specifies if type checking
- should be done one the data sent to the server.
- Default is True.
- _check_return_type (bool): specifies if type checking
- should be done one the data received from the server.
- Default is True.
- _host_index (int/None): specifies the index of the server
- that we want to use.
- Default is read from the configuration.
- async_req (bool): execute request asynchronously
-
- Returns:
- None
- If the method is called asynchronously, returns the request
- thread.
- """
- kwargs['async_req'] = kwargs.get(
- 'async_req', False
- )
- kwargs['_return_http_data_only'] = kwargs.get(
- '_return_http_data_only', True
- )
- kwargs['_preload_content'] = kwargs.get(
- '_preload_content', True
- )
- kwargs['_request_timeout'] = kwargs.get(
- '_request_timeout', None
- )
- kwargs['_check_input_type'] = kwargs.get(
- '_check_input_type', True
- )
- kwargs['_check_return_type'] = kwargs.get(
- '_check_return_type', True
- )
- kwargs['_host_index'] = kwargs.get('_host_index')
- kwargs['forgot_password_request'] = \
- forgot_password_request
- return self.forgot_password_endpoint.call_with_http_info(**kwargs)
-
def get_credentials(
self,
user_id,
@@ -4105,71 +3936,6 @@ def set_group_acl(
acl
return self.set_group_acl_endpoint.call_with_http_info(**kwargs)
- def update_password(
- self,
- update_password_by_token,
- **kwargs
- ):
- """Update user password by reset_password token # noqa: E501
-
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
-
- >>> thread = api.update_password(update_password_by_token, async_req=True)
- >>> result = thread.get()
-
- Args:
- update_password_by_token (UpdatePasswordByToken):
-
- Keyword Args:
- _return_http_data_only (bool): response data without head status
- code and headers. Default is True.
- _preload_content (bool): if False, the urllib3.HTTPResponse object
- will be returned without reading/decoding response data.
- Default is True.
- _request_timeout (int/float/tuple): timeout setting for this request. If
- one number provided, it will be total request timeout. It can also
- be a pair (tuple) of (connection, read) timeouts.
- Default is None.
- _check_input_type (bool): specifies if type checking
- should be done one the data sent to the server.
- Default is True.
- _check_return_type (bool): specifies if type checking
- should be done one the data received from the server.
- Default is True.
- _host_index (int/None): specifies the index of the server
- that we want to use.
- Default is read from the configuration.
- async_req (bool): execute request asynchronously
-
- Returns:
- None
- If the method is called asynchronously, returns the request
- thread.
- """
- kwargs['async_req'] = kwargs.get(
- 'async_req', False
- )
- kwargs['_return_http_data_only'] = kwargs.get(
- '_return_http_data_only', True
- )
- kwargs['_preload_content'] = kwargs.get(
- '_preload_content', True
- )
- kwargs['_request_timeout'] = kwargs.get(
- '_request_timeout', None
- )
- kwargs['_check_input_type'] = kwargs.get(
- '_check_input_type', True
- )
- kwargs['_check_return_type'] = kwargs.get(
- '_check_return_type', True
- )
- kwargs['_host_index'] = kwargs.get('_host_index')
- kwargs['update_password_by_token'] = \
- update_password_by_token
- return self.update_password_endpoint.call_with_http_info(**kwargs)
-
def update_policy(
self,
policy_id,
diff --git a/clients/python/lakefs_client/api/import_api.py b/clients/python/lakefs_client/api/import_api.py
index edc77687799..5bc04b88c76 100644
--- a/clients/python/lakefs_client/api/import_api.py
+++ b/clients/python/lakefs_client/api/import_api.py
@@ -25,11 +25,7 @@
from lakefs_client.model.error import Error
from lakefs_client.model.import_creation import ImportCreation
from lakefs_client.model.import_creation_response import ImportCreationResponse
-from lakefs_client.model.import_status_resp import ImportStatusResp
-from lakefs_client.model.ingest_range_creation_response import IngestRangeCreationResponse
-from lakefs_client.model.meta_range_creation import MetaRangeCreation
-from lakefs_client.model.meta_range_creation_response import MetaRangeCreationResponse
-from lakefs_client.model.stage_range_creation import StageRangeCreation
+from lakefs_client.model.import_status import ImportStatus
class ImportApi(object):
@@ -43,68 +39,6 @@ def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
- self.create_meta_range_endpoint = _Endpoint(
- settings={
- 'response_type': (MetaRangeCreationResponse,),
- 'auth': [
- 'basic_auth',
- 'cookie_auth',
- 'jwt_token',
- 'oidc_auth',
- 'saml_auth'
- ],
- 'endpoint_path': '/repositories/{repository}/branches/metaranges',
- 'operation_id': 'create_meta_range',
- 'http_method': 'POST',
- 'servers': None,
- },
- params_map={
- 'all': [
- 'repository',
- 'meta_range_creation',
- ],
- 'required': [
- 'repository',
- 'meta_range_creation',
- ],
- 'nullable': [
- ],
- 'enum': [
- ],
- 'validation': [
- ]
- },
- root_map={
- 'validations': {
- },
- 'allowed_values': {
- },
- 'openapi_types': {
- 'repository':
- (str,),
- 'meta_range_creation':
- (MetaRangeCreation,),
- },
- 'attribute_map': {
- 'repository': 'repository',
- },
- 'location_map': {
- 'repository': 'path',
- 'meta_range_creation': 'body',
- },
- 'collection_format_map': {
- }
- },
- headers_map={
- 'accept': [
- 'application/json'
- ],
- 'content_type': [
- 'application/json'
- ]
- },
- api_client=api_client
- )
self.import_cancel_endpoint = _Endpoint(
settings={
'response_type': None,
@@ -242,7 +176,7 @@ def __init__(self, api_client=None):
)
self.import_status_endpoint = _Endpoint(
settings={
- 'response_type': (ImportStatusResp,),
+ 'response_type': (ImportStatus,),
'auth': [
'basic_auth',
'cookie_auth',
@@ -307,137 +241,6 @@ def __init__(self, api_client=None):
},
api_client=api_client
)
- self.ingest_range_endpoint = _Endpoint(
- settings={
- 'response_type': (IngestRangeCreationResponse,),
- 'auth': [
- 'basic_auth',
- 'cookie_auth',
- 'jwt_token',
- 'oidc_auth',
- 'saml_auth'
- ],
- 'endpoint_path': '/repositories/{repository}/branches/ranges',
- 'operation_id': 'ingest_range',
- 'http_method': 'POST',
- 'servers': None,
- },
- params_map={
- 'all': [
- 'repository',
- 'stage_range_creation',
- ],
- 'required': [
- 'repository',
- 'stage_range_creation',
- ],
- 'nullable': [
- ],
- 'enum': [
- ],
- 'validation': [
- ]
- },
- root_map={
- 'validations': {
- },
- 'allowed_values': {
- },
- 'openapi_types': {
- 'repository':
- (str,),
- 'stage_range_creation':
- (StageRangeCreation,),
- },
- 'attribute_map': {
- 'repository': 'repository',
- },
- 'location_map': {
- 'repository': 'path',
- 'stage_range_creation': 'body',
- },
- 'collection_format_map': {
- }
- },
- headers_map={
- 'accept': [
- 'application/json'
- ],
- 'content_type': [
- 'application/json'
- ]
- },
- api_client=api_client
- )
-
- def create_meta_range(
- self,
- repository,
- meta_range_creation,
- **kwargs
- ):
- """create a lakeFS metarange file from the given ranges # noqa: E501
-
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
-
- >>> thread = api.create_meta_range(repository, meta_range_creation, async_req=True)
- >>> result = thread.get()
-
- Args:
- repository (str):
- meta_range_creation (MetaRangeCreation):
-
- Keyword Args:
- _return_http_data_only (bool): response data without head status
- code and headers. Default is True.
- _preload_content (bool): if False, the urllib3.HTTPResponse object
- will be returned without reading/decoding response data.
- Default is True.
- _request_timeout (int/float/tuple): timeout setting for this request. If
- one number provided, it will be total request timeout. It can also
- be a pair (tuple) of (connection, read) timeouts.
- Default is None.
- _check_input_type (bool): specifies if type checking
- should be done one the data sent to the server.
- Default is True.
- _check_return_type (bool): specifies if type checking
- should be done one the data received from the server.
- Default is True.
- _host_index (int/None): specifies the index of the server
- that we want to use.
- Default is read from the configuration.
- async_req (bool): execute request asynchronously
-
- Returns:
- MetaRangeCreationResponse
- If the method is called asynchronously, returns the request
- thread.
- """
- kwargs['async_req'] = kwargs.get(
- 'async_req', False
- )
- kwargs['_return_http_data_only'] = kwargs.get(
- '_return_http_data_only', True
- )
- kwargs['_preload_content'] = kwargs.get(
- '_preload_content', True
- )
- kwargs['_request_timeout'] = kwargs.get(
- '_request_timeout', None
- )
- kwargs['_check_input_type'] = kwargs.get(
- '_check_input_type', True
- )
- kwargs['_check_return_type'] = kwargs.get(
- '_check_return_type', True
- )
- kwargs['_host_index'] = kwargs.get('_host_index')
- kwargs['repository'] = \
- repository
- kwargs['meta_range_creation'] = \
- meta_range_creation
- return self.create_meta_range_endpoint.call_with_http_info(**kwargs)
def import_cancel(
self,
@@ -627,7 +430,7 @@ def import_status(
async_req (bool): execute request asynchronously
Returns:
- ImportStatusResp
+ ImportStatus
If the method is called asynchronously, returns the request
thread.
"""
@@ -658,72 +461,3 @@ def import_status(
id
return self.import_status_endpoint.call_with_http_info(**kwargs)
- def ingest_range(
- self,
- repository,
- stage_range_creation,
- **kwargs
- ):
- """create a lakeFS range file from the source uri # noqa: E501
-
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
-
- >>> thread = api.ingest_range(repository, stage_range_creation, async_req=True)
- >>> result = thread.get()
-
- Args:
- repository (str):
- stage_range_creation (StageRangeCreation):
-
- Keyword Args:
- _return_http_data_only (bool): response data without head status
- code and headers. Default is True.
- _preload_content (bool): if False, the urllib3.HTTPResponse object
- will be returned without reading/decoding response data.
- Default is True.
- _request_timeout (int/float/tuple): timeout setting for this request. If
- one number provided, it will be total request timeout. It can also
- be a pair (tuple) of (connection, read) timeouts.
- Default is None.
- _check_input_type (bool): specifies if type checking
- should be done one the data sent to the server.
- Default is True.
- _check_return_type (bool): specifies if type checking
- should be done one the data received from the server.
- Default is True.
- _host_index (int/None): specifies the index of the server
- that we want to use.
- Default is read from the configuration.
- async_req (bool): execute request asynchronously
-
- Returns:
- IngestRangeCreationResponse
- If the method is called asynchronously, returns the request
- thread.
- """
- kwargs['async_req'] = kwargs.get(
- 'async_req', False
- )
- kwargs['_return_http_data_only'] = kwargs.get(
- '_return_http_data_only', True
- )
- kwargs['_preload_content'] = kwargs.get(
- '_preload_content', True
- )
- kwargs['_request_timeout'] = kwargs.get(
- '_request_timeout', None
- )
- kwargs['_check_input_type'] = kwargs.get(
- '_check_input_type', True
- )
- kwargs['_check_return_type'] = kwargs.get(
- '_check_return_type', True
- )
- kwargs['_host_index'] = kwargs.get('_host_index')
- kwargs['repository'] = \
- repository
- kwargs['stage_range_creation'] = \
- stage_range_creation
- return self.ingest_range_endpoint.call_with_http_info(**kwargs)
-
diff --git a/clients/python/lakefs_client/api/internal_api.py b/clients/python/lakefs_client/api/internal_api.py
index 68b1538ae9b..6f4238cd944 100644
--- a/clients/python/lakefs_client/api/internal_api.py
+++ b/clients/python/lakefs_client/api/internal_api.py
@@ -29,7 +29,6 @@
from lakefs_client.model.setup import Setup
from lakefs_client.model.setup_state import SetupState
from lakefs_client.model.stats_events_list import StatsEventsList
-from lakefs_client.model.update_token import UpdateToken
class InternalApi(object):
@@ -393,74 +392,6 @@ def __init__(self, api_client=None):
},
api_client=api_client
)
- self.update_branch_token_endpoint = _Endpoint(
- settings={
- 'response_type': None,
- 'auth': [
- 'basic_auth',
- 'cookie_auth',
- 'jwt_token',
- 'oidc_auth',
- 'saml_auth'
- ],
- 'endpoint_path': '/repositories/{repository}/branches/{branch}/update_token',
- 'operation_id': 'update_branch_token',
- 'http_method': 'PUT',
- 'servers': None,
- },
- params_map={
- 'all': [
- 'repository',
- 'branch',
- 'update_token',
- ],
- 'required': [
- 'repository',
- 'branch',
- 'update_token',
- ],
- 'nullable': [
- ],
- 'enum': [
- ],
- 'validation': [
- ]
- },
- root_map={
- 'validations': {
- },
- 'allowed_values': {
- },
- 'openapi_types': {
- 'repository':
- (str,),
- 'branch':
- (str,),
- 'update_token':
- (UpdateToken,),
- },
- 'attribute_map': {
- 'repository': 'repository',
- 'branch': 'branch',
- },
- 'location_map': {
- 'repository': 'path',
- 'branch': 'path',
- 'update_token': 'body',
- },
- 'collection_format_map': {
- }
- },
- headers_map={
- 'accept': [
- 'application/json'
- ],
- 'content_type': [
- 'application/json'
- ]
- },
- api_client=api_client
- )
self.upload_object_preflight_endpoint = _Endpoint(
settings={
'response_type': None,
@@ -974,79 +905,6 @@ def setup_comm_prefs(
comm_prefs_input
return self.setup_comm_prefs_endpoint.call_with_http_info(**kwargs)
- def update_branch_token(
- self,
- repository,
- branch,
- update_token,
- **kwargs
- ):
- """modify branch staging token # noqa: E501
-
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
-
- >>> thread = api.update_branch_token(repository, branch, update_token, async_req=True)
- >>> result = thread.get()
-
- Args:
- repository (str):
- branch (str):
- update_token (UpdateToken):
-
- Keyword Args:
- _return_http_data_only (bool): response data without head status
- code and headers. Default is True.
- _preload_content (bool): if False, the urllib3.HTTPResponse object
- will be returned without reading/decoding response data.
- Default is True.
- _request_timeout (int/float/tuple): timeout setting for this request. If
- one number provided, it will be total request timeout. It can also
- be a pair (tuple) of (connection, read) timeouts.
- Default is None.
- _check_input_type (bool): specifies if type checking
- should be done one the data sent to the server.
- Default is True.
- _check_return_type (bool): specifies if type checking
- should be done one the data received from the server.
- Default is True.
- _host_index (int/None): specifies the index of the server
- that we want to use.
- Default is read from the configuration.
- async_req (bool): execute request asynchronously
-
- Returns:
- None
- If the method is called asynchronously, returns the request
- thread.
- """
- kwargs['async_req'] = kwargs.get(
- 'async_req', False
- )
- kwargs['_return_http_data_only'] = kwargs.get(
- '_return_http_data_only', True
- )
- kwargs['_preload_content'] = kwargs.get(
- '_preload_content', True
- )
- kwargs['_request_timeout'] = kwargs.get(
- '_request_timeout', None
- )
- kwargs['_check_input_type'] = kwargs.get(
- '_check_input_type', True
- )
- kwargs['_check_return_type'] = kwargs.get(
- '_check_return_type', True
- )
- kwargs['_host_index'] = kwargs.get('_host_index')
- kwargs['repository'] = \
- repository
- kwargs['branch'] = \
- branch
- kwargs['update_token'] = \
- update_token
- return self.update_branch_token_endpoint.call_with_http_info(**kwargs)
-
def upload_object_preflight(
self,
repository,
diff --git a/clients/python/lakefs_client/model/forgot_password_request.py b/clients/python/lakefs_client/model/forgot_password_request.py
deleted file mode 100644
index 318b9fa3d72..00000000000
--- a/clients/python/lakefs_client/model/forgot_password_request.py
+++ /dev/null
@@ -1,262 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import re # noqa: F401
-import sys # noqa: F401
-
-from lakefs_client.model_utils import ( # noqa: F401
- ApiTypeError,
- ModelComposed,
- ModelNormal,
- ModelSimple,
- cached_property,
- change_keys_js_to_python,
- convert_js_args_to_python_args,
- date,
- datetime,
- file_type,
- none_type,
- validate_get_composed_info,
-)
-from ..model_utils import OpenApiModel
-from lakefs_client.exceptions import ApiAttributeError
-
-
-
-class ForgotPasswordRequest(ModelNormal):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- Attributes:
- allowed_values (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- with a capitalized key describing the allowed value and an allowed
- value. These dicts store the allowed enum values.
- attribute_map (dict): The key is attribute name
- and the value is json key in definition.
- discriminator_value_class_map (dict): A dict to go from the discriminator
- variable value to the discriminator class name.
- validations (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- that stores validations for max_length, min_length, max_items,
- min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
- inclusive_minimum, and regex.
- additional_properties_type (tuple): A tuple of classes accepted
- as additional properties values.
- """
-
- allowed_values = {
- }
-
- validations = {
- }
-
- @cached_property
- def additional_properties_type():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
- """
- return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
-
- _nullable = False
-
- @cached_property
- def openapi_types():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
-
- Returns
- openapi_types (dict): The key is attribute name
- and the value is attribute type.
- """
- return {
- 'email': (str,), # noqa: E501
- }
-
- @cached_property
- def discriminator():
- return None
-
-
- attribute_map = {
- 'email': 'email', # noqa: E501
- }
-
- read_only_vars = {
- }
-
- _composed_schemas = {}
-
- @classmethod
- @convert_js_args_to_python_args
- def _from_openapi_data(cls, email, *args, **kwargs): # noqa: E501
- """ForgotPasswordRequest - a model defined in OpenAPI
-
- Args:
- email (str):
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- self = super(OpenApiModel, cls).__new__(cls)
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- self.email = email
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- return self
-
- required_properties = set([
- '_data_store',
- '_check_type',
- '_spec_property_naming',
- '_path_to_item',
- '_configuration',
- '_visited_composed_classes',
- ])
-
- @convert_js_args_to_python_args
- def __init__(self, email, *args, **kwargs): # noqa: E501
- """ForgotPasswordRequest - a model defined in OpenAPI
-
- Args:
- email (str):
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- self.email = email
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- if var_name in self.read_only_vars:
- raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
- f"class with read only attributes.")
diff --git a/clients/python/lakefs_client/model/import_location.py b/clients/python/lakefs_client/model/import_location.py
index 27198dfbe4e..2aa881a8d38 100644
--- a/clients/python/lakefs_client/model/import_location.py
+++ b/clients/python/lakefs_client/model/import_location.py
@@ -114,7 +114,7 @@ def _from_openapi_data(cls, type, path, destination, *args, **kwargs): # noqa:
Args:
type (str): Path type, can either be 'common_prefix' or 'object'
- path (str): A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type.
+ path (str): A source location to import path or to a single object. Must match the lakeFS installation blockstore type.
destination (str): Destination for the imported objects on the branch
Keyword Args:
@@ -203,7 +203,7 @@ def __init__(self, type, path, destination, *args, **kwargs): # noqa: E501
Args:
type (str): Path type, can either be 'common_prefix' or 'object'
- path (str): A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type.
+ path (str): A source location to import path or to a single object. Must match the lakeFS installation blockstore type.
destination (str): Destination for the imported objects on the branch
Keyword Args:
diff --git a/clients/python/lakefs_client/model/import_pagination.py b/clients/python/lakefs_client/model/import_pagination.py
deleted file mode 100644
index 18deae1654d..00000000000
--- a/clients/python/lakefs_client/model/import_pagination.py
+++ /dev/null
@@ -1,276 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import re # noqa: F401
-import sys # noqa: F401
-
-from lakefs_client.model_utils import ( # noqa: F401
- ApiTypeError,
- ModelComposed,
- ModelNormal,
- ModelSimple,
- cached_property,
- change_keys_js_to_python,
- convert_js_args_to_python_args,
- date,
- datetime,
- file_type,
- none_type,
- validate_get_composed_info,
-)
-from ..model_utils import OpenApiModel
-from lakefs_client.exceptions import ApiAttributeError
-
-
-
-class ImportPagination(ModelNormal):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- Attributes:
- allowed_values (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- with a capitalized key describing the allowed value and an allowed
- value. These dicts store the allowed enum values.
- attribute_map (dict): The key is attribute name
- and the value is json key in definition.
- discriminator_value_class_map (dict): A dict to go from the discriminator
- variable value to the discriminator class name.
- validations (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- that stores validations for max_length, min_length, max_items,
- min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
- inclusive_minimum, and regex.
- additional_properties_type (tuple): A tuple of classes accepted
- as additional properties values.
- """
-
- allowed_values = {
- }
-
- validations = {
- }
-
- @cached_property
- def additional_properties_type():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
- """
- return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
-
- _nullable = False
-
- @cached_property
- def openapi_types():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
-
- Returns
- openapi_types (dict): The key is attribute name
- and the value is attribute type.
- """
- return {
- 'has_more': (bool,), # noqa: E501
- 'last_key': (str,), # noqa: E501
- 'continuation_token': (str,), # noqa: E501
- 'staging_token': (str,), # noqa: E501
- }
-
- @cached_property
- def discriminator():
- return None
-
-
- attribute_map = {
- 'has_more': 'has_more', # noqa: E501
- 'last_key': 'last_key', # noqa: E501
- 'continuation_token': 'continuation_token', # noqa: E501
- 'staging_token': 'staging_token', # noqa: E501
- }
-
- read_only_vars = {
- }
-
- _composed_schemas = {}
-
- @classmethod
- @convert_js_args_to_python_args
- def _from_openapi_data(cls, has_more, last_key, *args, **kwargs): # noqa: E501
- """ImportPagination - a model defined in OpenAPI
-
- Args:
- has_more (bool): More keys to be ingested.
- last_key (str): Last object store key that was ingested.
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- continuation_token (str): Opaque. Token used to import the next range.. [optional] # noqa: E501
- staging_token (str): Staging token for skipped objects during ingest. [optional] # noqa: E501
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- self = super(OpenApiModel, cls).__new__(cls)
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- self.has_more = has_more
- self.last_key = last_key
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- return self
-
- required_properties = set([
- '_data_store',
- '_check_type',
- '_spec_property_naming',
- '_path_to_item',
- '_configuration',
- '_visited_composed_classes',
- ])
-
- @convert_js_args_to_python_args
- def __init__(self, has_more, last_key, *args, **kwargs): # noqa: E501
- """ImportPagination - a model defined in OpenAPI
-
- Args:
- has_more (bool): More keys to be ingested.
- last_key (str): Last object store key that was ingested.
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- continuation_token (str): Opaque. Token used to import the next range.. [optional] # noqa: E501
- staging_token (str): Staging token for skipped objects during ingest. [optional] # noqa: E501
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- self.has_more = has_more
- self.last_key = last_key
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- if var_name in self.read_only_vars:
- raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
- f"class with read only attributes.")
diff --git a/clients/python/lakefs_client/model/import_status_resp.py b/clients/python/lakefs_client/model/import_status.py
similarity index 98%
rename from clients/python/lakefs_client/model/import_status_resp.py
rename to clients/python/lakefs_client/model/import_status.py
index 4643fd64228..a07f1b6d515 100644
--- a/clients/python/lakefs_client/model/import_status_resp.py
+++ b/clients/python/lakefs_client/model/import_status.py
@@ -37,7 +37,7 @@ def lazy_import():
globals()['Error'] = Error
-class ImportStatusResp(ModelNormal):
+class ImportStatus(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
@@ -120,7 +120,7 @@ def discriminator():
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, completed, update_time, *args, **kwargs): # noqa: E501
- """ImportStatusResp - a model defined in OpenAPI
+ """ImportStatus - a model defined in OpenAPI
Args:
completed (bool):
@@ -211,7 +211,7 @@ def _from_openapi_data(cls, completed, update_time, *args, **kwargs): # noqa: E
@convert_js_args_to_python_args
def __init__(self, completed, update_time, *args, **kwargs): # noqa: E501
- """ImportStatusResp - a model defined in OpenAPI
+ """ImportStatus - a model defined in OpenAPI
Args:
completed (bool):
diff --git a/clients/python/lakefs_client/model/ingest_range_creation_response.py b/clients/python/lakefs_client/model/ingest_range_creation_response.py
deleted file mode 100644
index 8bdf19921ec..00000000000
--- a/clients/python/lakefs_client/model/ingest_range_creation_response.py
+++ /dev/null
@@ -1,268 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import re # noqa: F401
-import sys # noqa: F401
-
-from lakefs_client.model_utils import ( # noqa: F401
- ApiTypeError,
- ModelComposed,
- ModelNormal,
- ModelSimple,
- cached_property,
- change_keys_js_to_python,
- convert_js_args_to_python_args,
- date,
- datetime,
- file_type,
- none_type,
- validate_get_composed_info,
-)
-from ..model_utils import OpenApiModel
-from lakefs_client.exceptions import ApiAttributeError
-
-
-def lazy_import():
- from lakefs_client.model.import_pagination import ImportPagination
- from lakefs_client.model.range_metadata import RangeMetadata
- globals()['ImportPagination'] = ImportPagination
- globals()['RangeMetadata'] = RangeMetadata
-
-
-class IngestRangeCreationResponse(ModelNormal):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- Attributes:
- allowed_values (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- with a capitalized key describing the allowed value and an allowed
- value. These dicts store the allowed enum values.
- attribute_map (dict): The key is attribute name
- and the value is json key in definition.
- discriminator_value_class_map (dict): A dict to go from the discriminator
- variable value to the discriminator class name.
- validations (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- that stores validations for max_length, min_length, max_items,
- min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
- inclusive_minimum, and regex.
- additional_properties_type (tuple): A tuple of classes accepted
- as additional properties values.
- """
-
- allowed_values = {
- }
-
- validations = {
- }
-
- @cached_property
- def additional_properties_type():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
- """
- lazy_import()
- return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
-
- _nullable = False
-
- @cached_property
- def openapi_types():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
-
- Returns
- openapi_types (dict): The key is attribute name
- and the value is attribute type.
- """
- lazy_import()
- return {
- 'range': (RangeMetadata,), # noqa: E501
- 'pagination': (ImportPagination,), # noqa: E501
- }
-
- @cached_property
- def discriminator():
- return None
-
-
- attribute_map = {
- 'range': 'range', # noqa: E501
- 'pagination': 'pagination', # noqa: E501
- }
-
- read_only_vars = {
- }
-
- _composed_schemas = {}
-
- @classmethod
- @convert_js_args_to_python_args
- def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
- """IngestRangeCreationResponse - a model defined in OpenAPI
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- range (RangeMetadata): [optional] # noqa: E501
- pagination (ImportPagination): [optional] # noqa: E501
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- self = super(OpenApiModel, cls).__new__(cls)
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- return self
-
- required_properties = set([
- '_data_store',
- '_check_type',
- '_spec_property_naming',
- '_path_to_item',
- '_configuration',
- '_visited_composed_classes',
- ])
-
- @convert_js_args_to_python_args
- def __init__(self, *args, **kwargs): # noqa: E501
- """IngestRangeCreationResponse - a model defined in OpenAPI
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- range (RangeMetadata): [optional] # noqa: E501
- pagination (ImportPagination): [optional] # noqa: E501
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- if var_name in self.read_only_vars:
- raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
- f"class with read only attributes.")
diff --git a/clients/python/lakefs_client/model/object_stats.py b/clients/python/lakefs_client/model/object_stats.py
index acf36fb7dbe..109bc71c5f6 100644
--- a/clients/python/lakefs_client/model/object_stats.py
+++ b/clients/python/lakefs_client/model/object_stats.py
@@ -168,7 +168,7 @@ def _from_openapi_data(cls, path, path_type, physical_address, checksum, mtime,
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
- physical_address_expiry (int): If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501
+ physical_address_expiry (int): If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501
size_bytes (int): [optional] # noqa: E501
metadata (ObjectUserMetadata): [optional] # noqa: E501
content_type (str): Object media type. [optional] # noqa: E501
@@ -265,7 +265,7 @@ def __init__(self, path, path_type, physical_address, checksum, mtime, *args, **
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
- physical_address_expiry (int): If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501
+ physical_address_expiry (int): If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501
size_bytes (int): [optional] # noqa: E501
metadata (ObjectUserMetadata): [optional] # noqa: E501
content_type (str): Object media type. [optional] # noqa: E501
diff --git a/clients/python/lakefs_client/model/stage_range_creation.py b/clients/python/lakefs_client/model/stage_range_creation.py
deleted file mode 100644
index 7e639a55536..00000000000
--- a/clients/python/lakefs_client/model/stage_range_creation.py
+++ /dev/null
@@ -1,282 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import re # noqa: F401
-import sys # noqa: F401
-
-from lakefs_client.model_utils import ( # noqa: F401
- ApiTypeError,
- ModelComposed,
- ModelNormal,
- ModelSimple,
- cached_property,
- change_keys_js_to_python,
- convert_js_args_to_python_args,
- date,
- datetime,
- file_type,
- none_type,
- validate_get_composed_info,
-)
-from ..model_utils import OpenApiModel
-from lakefs_client.exceptions import ApiAttributeError
-
-
-
-class StageRangeCreation(ModelNormal):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- Attributes:
- allowed_values (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- with a capitalized key describing the allowed value and an allowed
- value. These dicts store the allowed enum values.
- attribute_map (dict): The key is attribute name
- and the value is json key in definition.
- discriminator_value_class_map (dict): A dict to go from the discriminator
- variable value to the discriminator class name.
- validations (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- that stores validations for max_length, min_length, max_items,
- min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
- inclusive_minimum, and regex.
- additional_properties_type (tuple): A tuple of classes accepted
- as additional properties values.
- """
-
- allowed_values = {
- }
-
- validations = {
- }
-
- @cached_property
- def additional_properties_type():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
- """
- return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
-
- _nullable = False
-
- @cached_property
- def openapi_types():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
-
- Returns
- openapi_types (dict): The key is attribute name
- and the value is attribute type.
- """
- return {
- 'from_source_uri': (str,), # noqa: E501
- 'after': (str,), # noqa: E501
- 'prepend': (str,), # noqa: E501
- 'continuation_token': (str,), # noqa: E501
- 'staging_token': (str,), # noqa: E501
- }
-
- @cached_property
- def discriminator():
- return None
-
-
- attribute_map = {
- 'from_source_uri': 'fromSourceURI', # noqa: E501
- 'after': 'after', # noqa: E501
- 'prepend': 'prepend', # noqa: E501
- 'continuation_token': 'continuation_token', # noqa: E501
- 'staging_token': 'staging_token', # noqa: E501
- }
-
- read_only_vars = {
- }
-
- _composed_schemas = {}
-
- @classmethod
- @convert_js_args_to_python_args
- def _from_openapi_data(cls, from_source_uri, after, prepend, *args, **kwargs): # noqa: E501
- """StageRangeCreation - a model defined in OpenAPI
-
- Args:
- from_source_uri (str): The source location of the ingested files. Must match the lakeFS installation blockstore type.
- after (str): Only objects after this key would be ingested.
- prepend (str): A prefix to prepend to ingested objects.
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- continuation_token (str): Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key.. [optional] # noqa: E501
- staging_token (str): Opaque. Client should pass staging_token if received from server on previous request. [optional] # noqa: E501
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- self = super(OpenApiModel, cls).__new__(cls)
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- self.from_source_uri = from_source_uri
- self.after = after
- self.prepend = prepend
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- return self
-
- required_properties = set([
- '_data_store',
- '_check_type',
- '_spec_property_naming',
- '_path_to_item',
- '_configuration',
- '_visited_composed_classes',
- ])
-
- @convert_js_args_to_python_args
- def __init__(self, from_source_uri, after, prepend, *args, **kwargs): # noqa: E501
- """StageRangeCreation - a model defined in OpenAPI
-
- Args:
- from_source_uri (str): The source location of the ingested files. Must match the lakeFS installation blockstore type.
- after (str): Only objects after this key would be ingested.
- prepend (str): A prefix to prepend to ingested objects.
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- continuation_token (str): Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key.. [optional] # noqa: E501
- staging_token (str): Opaque. Client should pass staging_token if received from server on previous request. [optional] # noqa: E501
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- self.from_source_uri = from_source_uri
- self.after = after
- self.prepend = prepend
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- if var_name in self.read_only_vars:
- raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
- f"class with read only attributes.")
diff --git a/clients/python/lakefs_client/model/staging_location.py b/clients/python/lakefs_client/model/staging_location.py
index b9ee2d7002a..d56b5cfc04a 100644
--- a/clients/python/lakefs_client/model/staging_location.py
+++ b/clients/python/lakefs_client/model/staging_location.py
@@ -145,8 +145,8 @@ def _from_openapi_data(cls, token, *args, **kwargs): # noqa: E501
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
physical_address (str): [optional] # noqa: E501
- presigned_url (str, none_type): if presign=true is passed in the request, this field will contain a presigned URL to use when uploading. [optional] # noqa: E501
- presigned_url_expiry (int): If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501
+ presigned_url (str, none_type): if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading. [optional] # noqa: E501
+ presigned_url_expiry (int): If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
@@ -233,8 +233,8 @@ def __init__(self, token, *args, **kwargs): # noqa: E501
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
physical_address (str): [optional] # noqa: E501
- presigned_url (str, none_type): if presign=true is passed in the request, this field will contain a presigned URL to use when uploading. [optional] # noqa: E501
- presigned_url_expiry (int): If present and nonzero, physical_address is a presigned URL and will expire at this Unix Epoch time. This will be shorter than the presigned URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501
+ presigned_url (str, none_type): if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading. [optional] # noqa: E501
+ presigned_url_expiry (int): If present and nonzero, physical_address is a pre-signed URL and will expire at this Unix Epoch time. This will be shorter than the pre-signed URL lifetime if an authentication token is about to expire. This field is *optional*. . [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
diff --git a/clients/python/lakefs_client/model/update_password_by_token.py b/clients/python/lakefs_client/model/update_password_by_token.py
deleted file mode 100644
index f6b5cd5fe0c..00000000000
--- a/clients/python/lakefs_client/model/update_password_by_token.py
+++ /dev/null
@@ -1,272 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import re # noqa: F401
-import sys # noqa: F401
-
-from lakefs_client.model_utils import ( # noqa: F401
- ApiTypeError,
- ModelComposed,
- ModelNormal,
- ModelSimple,
- cached_property,
- change_keys_js_to_python,
- convert_js_args_to_python_args,
- date,
- datetime,
- file_type,
- none_type,
- validate_get_composed_info,
-)
-from ..model_utils import OpenApiModel
-from lakefs_client.exceptions import ApiAttributeError
-
-
-
-class UpdatePasswordByToken(ModelNormal):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- Attributes:
- allowed_values (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- with a capitalized key describing the allowed value and an allowed
- value. These dicts store the allowed enum values.
- attribute_map (dict): The key is attribute name
- and the value is json key in definition.
- discriminator_value_class_map (dict): A dict to go from the discriminator
- variable value to the discriminator class name.
- validations (dict): The key is the tuple path to the attribute
- and the for var_name this is (var_name,). The value is a dict
- that stores validations for max_length, min_length, max_items,
- min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
- inclusive_minimum, and regex.
- additional_properties_type (tuple): A tuple of classes accepted
- as additional properties values.
- """
-
- allowed_values = {
- }
-
- validations = {
- }
-
- @cached_property
- def additional_properties_type():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
- """
- return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
-
- _nullable = False
-
- @cached_property
- def openapi_types():
- """
- This must be a method because a model may have properties that are
- of type self, this must run after the class is loaded
-
- Returns
- openapi_types (dict): The key is attribute name
- and the value is attribute type.
- """
- return {
- 'token': (str,), # noqa: E501
- 'new_password': (str,), # noqa: E501
- 'email': (str,), # noqa: E501
- }
-
- @cached_property
- def discriminator():
- return None
-
-
- attribute_map = {
- 'token': 'token', # noqa: E501
- 'new_password': 'newPassword', # noqa: E501
- 'email': 'email', # noqa: E501
- }
-
- read_only_vars = {
- }
-
- _composed_schemas = {}
-
- @classmethod
- @convert_js_args_to_python_args
- def _from_openapi_data(cls, token, new_password, *args, **kwargs): # noqa: E501
- """UpdatePasswordByToken - a model defined in OpenAPI
-
- Args:
- token (str): token used for authentication
- new_password (str): new password to update
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- email (str): optional user email to match the token for verification. [optional] # noqa: E501
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- self = super(OpenApiModel, cls).__new__(cls)
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- self.token = token
- self.new_password = new_password
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- return self
-
- required_properties = set([
- '_data_store',
- '_check_type',
- '_spec_property_naming',
- '_path_to_item',
- '_configuration',
- '_visited_composed_classes',
- ])
-
- @convert_js_args_to_python_args
- def __init__(self, token, new_password, *args, **kwargs): # noqa: E501
- """UpdatePasswordByToken - a model defined in OpenAPI
-
- Args:
- token (str): token used for authentication
- new_password (str): new password to update
-
- Keyword Args:
- _check_type (bool): if True, values for parameters in openapi_types
- will be type checked and a TypeError will be
- raised if the wrong type is input.
- Defaults to True
- _path_to_item (tuple/list): This is a list of keys or values to
- drill down to the model in received_data
- when deserializing a response
- _spec_property_naming (bool): True if the variable names in the input data
- are serialized names, as specified in the OpenAPI document.
- False if the variable names in the input data
- are pythonic names, e.g. snake case (default)
- _configuration (Configuration): the instance to use when
- deserializing a file_type parameter.
- If passed, type conversion is attempted
- If omitted no type conversion is done.
- _visited_composed_classes (tuple): This stores a tuple of
- classes that we have traveled through so that
- if we see that class again we will not use its
- discriminator again.
- When traveling through a discriminator, the
- composed schema that is
- is traveled through is added to this set.
- For example if Animal has a discriminator
- petType and we pass in "Dog", and the class Dog
- allOf includes Animal, we move through Animal
- once using the discriminator, and pick Dog.
- Then in Dog, we will make an instance of the
- Animal class but this time we won't travel
- through its discriminator because we passed in
- _visited_composed_classes = (Animal,)
- email (str): optional user email to match the token for verification. [optional] # noqa: E501
- """
-
- _check_type = kwargs.pop('_check_type', True)
- _spec_property_naming = kwargs.pop('_spec_property_naming', False)
- _path_to_item = kwargs.pop('_path_to_item', ())
- _configuration = kwargs.pop('_configuration', None)
- _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
-
- if args:
- raise ApiTypeError(
- "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
- args,
- self.__class__.__name__,
- ),
- path_to_item=_path_to_item,
- valid_classes=(self.__class__,),
- )
-
- self._data_store = {}
- self._check_type = _check_type
- self._spec_property_naming = _spec_property_naming
- self._path_to_item = _path_to_item
- self._configuration = _configuration
- self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
-
- self.token = token
- self.new_password = new_password
- for var_name, var_value in kwargs.items():
- if var_name not in self.attribute_map and \
- self._configuration is not None and \
- self._configuration.discard_unknown_keys and \
- self.additional_properties_type is None:
- # discard variable.
- continue
- setattr(self, var_name, var_value)
- if var_name in self.read_only_vars:
- raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
- f"class with read only attributes.")
diff --git a/clients/python/lakefs_client/model/user.py b/clients/python/lakefs_client/model/user.py
index f39590763fb..8128380762a 100644
--- a/clients/python/lakefs_client/model/user.py
+++ b/clients/python/lakefs_client/model/user.py
@@ -85,7 +85,6 @@ def openapi_types():
'id': (str,), # noqa: E501
'creation_date': (int,), # noqa: E501
'friendly_name': (str,), # noqa: E501
- 'email': (str,), # noqa: E501
}
@cached_property
@@ -97,7 +96,6 @@ def discriminator():
'id': 'id', # noqa: E501
'creation_date': 'creation_date', # noqa: E501
'friendly_name': 'friendly_name', # noqa: E501
- 'email': 'email', # noqa: E501
}
read_only_vars = {
@@ -111,7 +109,7 @@ def _from_openapi_data(cls, id, creation_date, *args, **kwargs): # noqa: E501
"""User - a model defined in OpenAPI
Args:
- id (str): a unique identifier for the user. In password-based authentication, this is the email.
+ id (str): a unique identifier for the user.
creation_date (int): Unix Epoch in seconds
Keyword Args:
@@ -146,7 +144,6 @@ def _from_openapi_data(cls, id, creation_date, *args, **kwargs): # noqa: E501
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
friendly_name (str): [optional] # noqa: E501
- email (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
@@ -200,7 +197,7 @@ def __init__(self, id, creation_date, *args, **kwargs): # noqa: E501
"""User - a model defined in OpenAPI
Args:
- id (str): a unique identifier for the user. In password-based authentication, this is the email.
+ id (str): a unique identifier for the user.
creation_date (int): Unix Epoch in seconds
Keyword Args:
@@ -235,7 +232,6 @@ def __init__(self, id, creation_date, *args, **kwargs): # noqa: E501
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
friendly_name (str): [optional] # noqa: E501
- email (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
diff --git a/clients/python/lakefs_client/model/user_creation.py b/clients/python/lakefs_client/model/user_creation.py
index 0ecb59f4033..db179bbfb68 100644
--- a/clients/python/lakefs_client/model/user_creation.py
+++ b/clients/python/lakefs_client/model/user_creation.py
@@ -107,7 +107,7 @@ def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501
"""UserCreation - a model defined in OpenAPI
Args:
- id (str): a unique identifier for the user. In password-based authentication, this is the email.
+ id (str): a unique identifier for the user.
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
@@ -193,7 +193,7 @@ def __init__(self, id, *args, **kwargs): # noqa: E501
"""UserCreation - a model defined in OpenAPI
Args:
- id (str): a unique identifier for the user. In password-based authentication, this is the email.
+ id (str): a unique identifier for the user.
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
diff --git a/clients/python/lakefs_client/models/__init__.py b/clients/python/lakefs_client/models/__init__.py
index 9b1f2e12104..30a1c3059cb 100644
--- a/clients/python/lakefs_client/models/__init__.py
+++ b/clients/python/lakefs_client/models/__init__.py
@@ -32,7 +32,6 @@
from lakefs_client.model.error import Error
from lakefs_client.model.error_no_acl import ErrorNoACL
from lakefs_client.model.find_merge_base_result import FindMergeBaseResult
-from lakefs_client.model.forgot_password_request import ForgotPasswordRequest
from lakefs_client.model.garbage_collection_config import GarbageCollectionConfig
from lakefs_client.model.garbage_collection_prepare_request import GarbageCollectionPrepareRequest
from lakefs_client.model.garbage_collection_prepare_response import GarbageCollectionPrepareResponse
@@ -46,9 +45,7 @@
from lakefs_client.model.import_creation import ImportCreation
from lakefs_client.model.import_creation_response import ImportCreationResponse
from lakefs_client.model.import_location import ImportLocation
-from lakefs_client.model.import_pagination import ImportPagination
-from lakefs_client.model.import_status_resp import ImportStatusResp
-from lakefs_client.model.ingest_range_creation_response import IngestRangeCreationResponse
+from lakefs_client.model.import_status import ImportStatus
from lakefs_client.model.inline_object import InlineObject
from lakefs_client.model.inline_object1 import InlineObject1
from lakefs_client.model.login_config import LoginConfig
@@ -85,7 +82,6 @@
from lakefs_client.model.revert_creation import RevertCreation
from lakefs_client.model.setup import Setup
from lakefs_client.model.setup_state import SetupState
-from lakefs_client.model.stage_range_creation import StageRangeCreation
from lakefs_client.model.staging_location import StagingLocation
from lakefs_client.model.staging_metadata import StagingMetadata
from lakefs_client.model.statement import Statement
@@ -95,7 +91,6 @@
from lakefs_client.model.storage_uri import StorageURI
from lakefs_client.model.tag_creation import TagCreation
from lakefs_client.model.underlying_object_properties import UnderlyingObjectProperties
-from lakefs_client.model.update_password_by_token import UpdatePasswordByToken
from lakefs_client.model.update_token import UpdateToken
from lakefs_client.model.user import User
from lakefs_client.model.user_creation import UserCreation
diff --git a/clients/python/test/test_auth_api.py b/clients/python/test/test_auth_api.py
index 364f31b6b0e..1a92c1b8ee1 100644
--- a/clients/python/test/test_auth_api.py
+++ b/clients/python/test/test_auth_api.py
@@ -122,13 +122,6 @@ def test_detach_policy_from_user(self):
"""
pass
- def test_forgot_password(self):
- """Test case for forgot_password
-
- forgot password request initiates the password reset process # noqa: E501
- """
- pass
-
def test_get_credentials(self):
"""Test case for get_credentials
@@ -241,13 +234,6 @@ def test_set_group_acl(self):
"""
pass
- def test_update_password(self):
- """Test case for update_password
-
- Update user password by reset_password token # noqa: E501
- """
- pass
-
def test_update_policy(self):
"""Test case for update_policy
diff --git a/clients/python/test/test_forgot_password_request.py b/clients/python/test/test_forgot_password_request.py
deleted file mode 100644
index 217daa1fb2c..00000000000
--- a/clients/python/test/test_forgot_password_request.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import sys
-import unittest
-
-import lakefs_client
-from lakefs_client.model.forgot_password_request import ForgotPasswordRequest
-
-
-class TestForgotPasswordRequest(unittest.TestCase):
- """ForgotPasswordRequest unit test stubs"""
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def testForgotPasswordRequest(self):
- """Test ForgotPasswordRequest"""
- # FIXME: construct object with mandatory attributes with example values
- # model = ForgotPasswordRequest() # noqa: E501
- pass
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/clients/python/test/test_import_api.py b/clients/python/test/test_import_api.py
index cef413386eb..079eed8c1b6 100644
--- a/clients/python/test/test_import_api.py
+++ b/clients/python/test/test_import_api.py
@@ -24,13 +24,6 @@ def setUp(self):
def tearDown(self):
pass
- def test_create_meta_range(self):
- """Test case for create_meta_range
-
- create a lakeFS metarange file from the given ranges # noqa: E501
- """
- pass
-
def test_import_cancel(self):
"""Test case for import_cancel
@@ -52,13 +45,6 @@ def test_import_status(self):
"""
pass
- def test_ingest_range(self):
- """Test case for ingest_range
-
- create a lakeFS range file from the source uri # noqa: E501
- """
- pass
-
if __name__ == '__main__':
unittest.main()
diff --git a/clients/python/test/test_import_pagination.py b/clients/python/test/test_import_pagination.py
deleted file mode 100644
index f65f829c102..00000000000
--- a/clients/python/test/test_import_pagination.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import sys
-import unittest
-
-import lakefs_client
-from lakefs_client.model.import_pagination import ImportPagination
-
-
-class TestImportPagination(unittest.TestCase):
- """ImportPagination unit test stubs"""
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def testImportPagination(self):
- """Test ImportPagination"""
- # FIXME: construct object with mandatory attributes with example values
- # model = ImportPagination() # noqa: E501
- pass
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/clients/python/test/test_import_status_resp.py b/clients/python/test/test_import_status.py
similarity index 68%
rename from clients/python/test/test_import_status_resp.py
rename to clients/python/test/test_import_status.py
index 2cc4dd8b245..c7a593b0ecc 100644
--- a/clients/python/test/test_import_status_resp.py
+++ b/clients/python/test/test_import_status.py
@@ -17,11 +17,11 @@
from lakefs_client.model.error import Error
globals()['Commit'] = Commit
globals()['Error'] = Error
-from lakefs_client.model.import_status_resp import ImportStatusResp
+from lakefs_client.model.import_status import ImportStatus
-class TestImportStatusResp(unittest.TestCase):
- """ImportStatusResp unit test stubs"""
+class TestImportStatus(unittest.TestCase):
+ """ImportStatus unit test stubs"""
def setUp(self):
pass
@@ -29,10 +29,10 @@ def setUp(self):
def tearDown(self):
pass
- def testImportStatusResp(self):
- """Test ImportStatusResp"""
+ def testImportStatus(self):
+ """Test ImportStatus"""
# FIXME: construct object with mandatory attributes with example values
- # model = ImportStatusResp() # noqa: E501
+ # model = ImportStatus() # noqa: E501
pass
diff --git a/clients/python/test/test_ingest_range_creation_response.py b/clients/python/test/test_ingest_range_creation_response.py
deleted file mode 100644
index 6d3fb9326ae..00000000000
--- a/clients/python/test/test_ingest_range_creation_response.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import sys
-import unittest
-
-import lakefs_client
-from lakefs_client.model.import_pagination import ImportPagination
-from lakefs_client.model.range_metadata import RangeMetadata
-globals()['ImportPagination'] = ImportPagination
-globals()['RangeMetadata'] = RangeMetadata
-from lakefs_client.model.ingest_range_creation_response import IngestRangeCreationResponse
-
-
-class TestIngestRangeCreationResponse(unittest.TestCase):
- """IngestRangeCreationResponse unit test stubs"""
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def testIngestRangeCreationResponse(self):
- """Test IngestRangeCreationResponse"""
- # FIXME: construct object with mandatory attributes with example values
- # model = IngestRangeCreationResponse() # noqa: E501
- pass
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/clients/python/test/test_internal_api.py b/clients/python/test/test_internal_api.py
index 5ccb8f17450..96637ea841f 100644
--- a/clients/python/test/test_internal_api.py
+++ b/clients/python/test/test_internal_api.py
@@ -71,13 +71,6 @@ def test_setup_comm_prefs(self):
"""
pass
- def test_update_branch_token(self):
- """Test case for update_branch_token
-
- modify branch staging token # noqa: E501
- """
- pass
-
def test_upload_object_preflight(self):
"""Test case for upload_object_preflight
diff --git a/clients/python/test/test_stage_range_creation.py b/clients/python/test/test_stage_range_creation.py
deleted file mode 100644
index f75cbff5acd..00000000000
--- a/clients/python/test/test_stage_range_creation.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import sys
-import unittest
-
-import lakefs_client
-from lakefs_client.model.stage_range_creation import StageRangeCreation
-
-
-class TestStageRangeCreation(unittest.TestCase):
- """StageRangeCreation unit test stubs"""
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def testStageRangeCreation(self):
- """Test StageRangeCreation"""
- # FIXME: construct object with mandatory attributes with example values
- # model = StageRangeCreation() # noqa: E501
- pass
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/clients/python/test/test_update_password_by_token.py b/clients/python/test/test_update_password_by_token.py
deleted file mode 100644
index c1779e542de..00000000000
--- a/clients/python/test/test_update_password_by_token.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
- lakeFS API
-
- lakeFS HTTP API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: services@treeverse.io
- Generated by: https://openapi-generator.tech
-"""
-
-
-import sys
-import unittest
-
-import lakefs_client
-from lakefs_client.model.update_password_by_token import UpdatePasswordByToken
-
-
-class TestUpdatePasswordByToken(unittest.TestCase):
- """UpdatePasswordByToken unit test stubs"""
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def testUpdatePasswordByToken(self):
- """Test UpdatePasswordByToken"""
- # FIXME: construct object with mandatory attributes with example values
- # model = UpdatePasswordByToken() # noqa: E501
- pass
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/cmd/lakectl/cmd/config.go b/cmd/lakectl/cmd/config.go
index 51c9ebec056..96b0f0acea8 100644
--- a/cmd/lakectl/cmd/config.go
+++ b/cmd/lakectl/cmd/config.go
@@ -30,15 +30,33 @@ var configCmd = &cobra.Command{
// get user input
questions := []struct {
Key string
- Prompt *promptui.Prompt
+ Prompt promptui.Prompt
}{
- {Key: "credentials.access_key_id", Prompt: &promptui.Prompt{Label: "Access key ID"}},
- {Key: "credentials.secret_access_key", Prompt: &promptui.Prompt{Label: "Secret access key", Mask: '*'}},
- {Key: "server.endpoint_url", Prompt: &promptui.Prompt{Label: "Server endpoint URL", Validate: func(rawURL string) error {
- _, err := url.ParseRequestURI(rawURL)
- return err
- }}},
+ {
+ Key: "credentials.access_key_id",
+ Prompt: promptui.Prompt{
+ Label: "Access key ID",
+ },
+ },
+ {
+ Key: "credentials.secret_access_key",
+ Prompt: promptui.Prompt{
+ Label: "Secret access key",
+ Mask: '*',
+ },
+ },
+ {
+ Key: "server.endpoint_url",
+ Prompt: promptui.Prompt{
+ Label: "Server endpoint URL (e.g. http://localhost:8000)",
+ Validate: func(rawURL string) error {
+ _, err := url.ParseRequestURI(rawURL)
+ return err
+ },
+ },
+ },
}
+
for _, question := range questions {
question.Prompt.Default = viper.GetString(question.Key)
val, err := question.Prompt.Run()
diff --git a/cmd/lakectl/cmd/docs.go b/cmd/lakectl/cmd/docs.go
index 1a8f23ebb2e..ddcdcbf91ef 100644
--- a/cmd/lakectl/cmd/docs.go
+++ b/cmd/lakectl/cmd/docs.go
@@ -16,6 +16,7 @@ description: lakeFS comes with its own native CLI client. Here you can see the c
parent: Reference
redirect_from:
- /reference/commands.html
+ - /quickstart/lakefs_cli.html
---
{% comment %}
@@ -52,7 +53,7 @@ lakectl config
# Config file /home/janedoe/.lakectl.yaml will be used
# Access key ID: AKIAIOSFODNN7EXAMPLE
# Secret access key: ****************************************
-# Server endpoint URL: http://localhost:8000/api/v1
+# Server endpoint URL: http://localhost:8000
` + "```" + `
This will setup a ` + "`$HOME/.lakectl.yaml`" + ` file with the credentials and API endpoint you've supplied.
diff --git a/cmd/lakectl/cmd/local.go b/cmd/lakectl/cmd/local.go
index be02d5683ff..c96372faa3c 100644
--- a/cmd/lakectl/cmd/local.go
+++ b/cmd/lakectl/cmd/local.go
@@ -99,7 +99,7 @@ func getLocalSyncFlags(cmd *cobra.Command, client *apigen.ClientWithResponses) s
}
// getLocalArgs parses arguments to extract a remote URI and deduces the local path.
-// If local path isn't provided and considerGitRoot is true, it uses the git repository root.
+// If the local path isn't provided and considerGitRoot is true, it uses the git repository root.
func getLocalArgs(args []string, requireRemote bool, considerGitRoot bool) (remote *uri.URI, localPath string) {
idx := 0
if requireRemote {
diff --git a/cmd/lakectl/cmd/local_clone.go b/cmd/lakectl/cmd/local_clone.go
index 0b18c224f86..09c615eb79b 100644
--- a/cmd/lakectl/cmd/local_clone.go
+++ b/cmd/lakectl/cmd/local_clone.go
@@ -46,9 +46,9 @@ var localCloneCmd = &cobra.Command{
}
stableRemote := remote.WithRef(head)
// Dynamically construct changes
- c := make(chan *local.Change, filesChanSize)
+ ch := make(chan *local.Change, filesChanSize)
go func() {
- defer close(c)
+ defer close(ch)
remotePath := remote.GetPath()
var after string
for {
@@ -70,7 +70,7 @@ var localCloneCmd = &cobra.Command{
if relPath == "" || strings.HasSuffix(relPath, uri.PathSeparator) {
continue
}
- c <- &local.Change{
+ ch <- &local.Change{
Source: local.ChangeSourceRemote,
Path: relPath,
Type: local.ChangeTypeAdded,
@@ -88,7 +88,7 @@ var localCloneCmd = &cobra.Command{
}
sigCtx := localHandleSyncInterrupt(ctx, idx, string(cloneOperation))
s := local.NewSyncManager(sigCtx, client, syncFlags.parallelism, syncFlags.presign)
- err = s.Sync(localPath, stableRemote, c)
+ err = s.Sync(localPath, stableRemote, ch)
if err != nil {
DieErr(err)
}
diff --git a/cmd/lakectl/cmd/log.go b/cmd/lakectl/cmd/log.go
index 09936a60039..3e3f961f6b0 100644
--- a/cmd/lakectl/cmd/log.go
+++ b/cmd/lakectl/cmd/log.go
@@ -56,7 +56,7 @@ func (d *dotWriter) Write(commits []apigen.Commit) {
label = fmt.Sprintf("%s", label)
}
baseURL := strings.TrimSuffix(strings.TrimSuffix(
- string(cfg.Server.EndpointURL), "/api/v1"), "/")
+ string(cfg.Server.EndpointURL), apiutil.BaseURL), "/")
_, _ = fmt.Fprintf(d.w, "\n\t\"%s\" [shape=note target=\"_blank\" href=\"%s/repositories/%s/commits/%s\" label=< %s >]\n",
commit.Id, baseURL, repoID, commit.Id, label)
for _, parent := range commit.Parents {
diff --git a/docs/assets/js/swagger.yml b/docs/assets/js/swagger.yml
index 9ebc23a52eb..3b9ebba8be6 100644
--- a/docs/assets/js/swagger.yml
+++ b/docs/assets/js/swagger.yml
@@ -5,7 +5,7 @@ info:
title: lakeFS API
license:
name: "Apache 2.0"
- url: http://www.apache.org/licenses/LICENSE-2.0.html
+ url: https://www.apache.org/licenses/LICENSE-2.0.html
version: 0.1.0
servers:
@@ -151,25 +151,6 @@ components:
minimum: 0
description: Maximal number of entries per page
- ImportPagination:
- type: object
- required:
- - has_more
- - last_key
- properties:
- has_more:
- type: boolean
- description: More keys to be ingested.
- continuation_token:
- type: string
- description: Opaque. Token used to import the next range.
- last_key:
- type: string
- description: Last object store key that was ingested.
- staging_token:
- type: string
- description: Staging token for skipped objects during ingest
-
Repository:
type: object
required:
@@ -290,9 +271,9 @@ components:
type: integer
format: int64
description: |
- If present and nonzero, physical_address is a presigned URL and
+ If present and nonzero, physical_address is a pre-signed URL and
will expire at this Unix Epoch time. This will be shorter than
- the presigned URL lifetime if an authentication token is about
+ the pre-signed URL lifetime if an authentication token is about
to expire.
This field is *optional*.
@@ -691,15 +672,13 @@ components:
properties:
id:
type: string
- description: a unique identifier for the user. In password-based authentication, this is the email.
+ description: a unique identifier for the user.
creation_date:
type: integer
format: int64
description: Unix Epoch in seconds
friendly_name:
type: string
- email:
- type: string
CurrentUser:
type: object
@@ -709,20 +688,12 @@ components:
user:
$ref: "#/components/schemas/User"
- ForgotPasswordRequest:
- type: object
- required:
- - email
- properties:
- email:
- type: string
-
UserCreation:
type: object
properties:
id:
type: string
- description: a unique identifier for the user. In password-based authentication, this is the email.
+ description: a unique identifier for the user.
invite_user:
type: boolean
required:
@@ -821,21 +792,6 @@ components:
- featureUpdates
- securityUpdates
- UpdatePasswordByToken:
- type: object
- properties:
- token:
- description: token used for authentication
- type: string
- newPassword:
- description: new password to update
- type: string
- email:
- description: optional user email to match the token for verification
- type: string
- required:
- - token
- - newPassword
Credentials:
type: object
@@ -1157,14 +1113,14 @@ components:
presigned_url:
type: string
nullable: true
- description: if presign=true is passed in the request, this field will contain a presigned URL to use when uploading
+ description: if presign=true is passed in the request, this field will contain a pre-signed URL to use when uploading
presigned_url_expiry:
type: integer
format: int64
description: |
- If present and nonzero, physical_address is a presigned URL and
+ If present and nonzero, physical_address is a pre-signed URL and
will expire at this Unix Epoch time. This will be shorter than
- the presigned URL lifetime if an authentication token is about
+ the pre-signed URL lifetime if an authentication token is about
to expire.
This field is *optional*.
@@ -1292,7 +1248,7 @@ components:
description: Path type, can either be 'common_prefix' or 'object'
path:
type: string
- description: A source location to ingested path or to a single object. Must match the lakeFS installation blockstore type.
+ description: A source location to import path or to a single object. Must match the lakeFS installation blockstore type.
example: s3://my-bucket/production/collections/
destination:
type: string
@@ -1320,32 +1276,6 @@ components:
destination: collections/file1
type: object
- StageRangeCreation:
- type: object
- required:
- - fromSourceURI
- - after
- - prepend
- properties:
- fromSourceURI:
- type: string
- description: The source location of the ingested files. Must match the lakeFS installation blockstore type.
- example: s3://my-bucket/production/collections/
- after:
- type: string
- description: Only objects after this key would be ingested.
- example: production/collections/some/file.parquet
- prepend:
- type: string
- description: A prefix to prepend to ingested objects.
- example: collections/
- continuation_token:
- type: string
- description: Opaque. Client should pass the continuation_token received from server to continue creation ranges from the same key.
- staging_token:
- type: string
- description: Opaque. Client should pass staging_token if received from server on previous request
-
RangeMetadata:
type: object
required:
@@ -1374,15 +1304,7 @@ components:
type: integer
description: Estimated size of the range in bytes
- IngestRangeCreationResponse:
- type: object
- properties:
- range:
- $ref: "#/components/schemas/RangeMetadata"
- pagination:
- $ref: "#/components/schemas/ImportPagination"
-
- ImportStatusResp:
+ ImportStatus:
type: object
properties:
completed:
@@ -1587,48 +1509,7 @@ paths:
default:
$ref: "#/components/responses/ServerError"
- /auth/password:
- post:
- tags:
- - auth
- operationId: updatePassword
- summary: Update user password by reset_password token
- security:
- - cookie_auth: []
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/UpdatePasswordByToken"
- responses:
- 201:
- description: successful reset
- 401:
- $ref: "#/components/responses/Unauthorized"
- default:
- $ref: "#/components/responses/ServerError"
- /auth/password/forgot:
- post:
- tags:
- - auth
- operationId: forgotPassword
- summary: forgot password request initiates the password reset process
- security: []
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/ForgotPasswordRequest"
- responses:
- 204:
- description: No content
- 400:
- $ref: "#/components/responses/BadRequest"
- default:
- $ref: "#/components/responses/ServerError"
/auth/capabilities:
get:
@@ -1955,7 +1836,7 @@ paths:
- $ref: "#/components/parameters/PaginationAmount"
responses:
200:
- description: group memeber list
+ description: group member list
content:
application/json:
schema:
@@ -3476,7 +3357,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/ImportStatusResp"
+ $ref: "#/components/schemas/ImportStatus"
401:
$ref: "#/components/responses/Unauthorized"
404:
@@ -3535,114 +3416,6 @@ paths:
default:
$ref: "#/components/responses/ServerError"
- /repositories/{repository}/branches/metaranges:
- parameters:
- - in: path
- name: repository
- required: true
- schema:
- type: string
- post:
- tags:
- - import
- operationId: createMetaRange
- summary: create a lakeFS metarange file from the given ranges
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/MetaRangeCreation"
- responses:
- 201:
- description: metarange metadata
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/MetaRangeCreationResponse"
- 400:
- $ref: "#/components/responses/ValidationError"
- 401:
- $ref: "#/components/responses/Unauthorized"
- 403:
- $ref: "#/components/responses/Forbidden"
- 404:
- $ref: "#/components/responses/NotFound"
- default:
- $ref: "#/components/responses/ServerError"
-
- /repositories/{repository}/branches/ranges:
- parameters:
- - in: path
- name: repository
- required: true
- schema:
- type: string
- post:
- tags:
- - import
- operationId: ingestRange
- summary: create a lakeFS range file from the source uri
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/StageRangeCreation"
- responses:
- 201:
- description: range metadata
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/IngestRangeCreationResponse"
-
- 400:
- $ref: "#/components/responses/ValidationError"
- 401:
- $ref: "#/components/responses/Unauthorized"
- 404:
- $ref: "#/components/responses/NotFound"
- default:
- $ref: "#/components/responses/ServerError"
-
- /repositories/{repository}/branches/{branch}/update_token:
- parameters:
- - in: path
- name: repository
- required: true
- schema:
- type: string
- - in: path
- name: branch
- required: true
- schema:
- type: string
- put:
- tags:
- - internal
- operationId: updateBranchToken
- summary: modify branch staging token
- requestBody:
- required: true
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/UpdateToken"
- responses:
- 204:
- description: branch updated successfully
- 400:
- $ref: "#/components/responses/ValidationError"
- 401:
- $ref: "#/components/responses/Unauthorized"
- 403:
- $ref: "#/components/responses/Forbidden"
- 404:
- $ref: "#/components/responses/NotFound"
- default:
- $ref: "#/components/responses/ServerError"
-
/repositories/{repository}/branches/{branch}/objects/stage_allowed:
parameters:
- in: path
diff --git a/docs/howto/deploy/onprem.md b/docs/howto/deploy/onprem.md
index 981531210e4..f666327838c 100644
--- a/docs/howto/deploy/onprem.md
+++ b/docs/howto/deploy/onprem.md
@@ -9,6 +9,7 @@ redirect_from:
- /integrations/minio.html
- /using/minio.html
- /deploy/onprem.html
+ - /deploying/install.html
next: ["Import data into your installation", "/howto/import.html"]
---
diff --git a/docs/howto/garbage-collection/index.md b/docs/howto/garbage-collection/index.md
index 3e09b22c36d..497069244f9 100644
--- a/docs/howto/garbage-collection/index.md
+++ b/docs/howto/garbage-collection/index.md
@@ -7,6 +7,7 @@ redirect_from:
- /reference/garbage-collection.html
- /howto/garbage-collection-index.html
- /howto/garbage-collection.html
+ - /reference/retention.html
---
# Garbage Collection
diff --git a/docs/howto/hooks/index.md b/docs/howto/hooks/index.md
index 76abfc0de7e..79f13f8318a 100644
--- a/docs/howto/hooks/index.md
+++ b/docs/howto/hooks/index.md
@@ -10,6 +10,7 @@ redirect_from:
- /hooks/overview.html
- /hooks/index.html
- /hooks/
+ - /setup/hooks.html
---
# Actions and Hooks in lakeFS
diff --git a/docs/howto/unity-delta-sharing.md b/docs/howto/unity-delta-sharing.md
index 0a82b3b1a06..95b4b015c98 100644
--- a/docs/howto/unity-delta-sharing.md
+++ b/docs/howto/unity-delta-sharing.md
@@ -4,6 +4,7 @@ parent: lakeFS Cloud
description: The lakeFS Delta Sharing service lets you export DeltaLake and HMS-style tables stored on lakeFS over the Delta Sharing protocol. This is particularly useful with DataBricks Unity.
redirect_from:
- /cloud/unity-delta-sharing.html
+ - /cloud/unity-delta-sharing-m0-users
---
# Unity Delta Sharing
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index d032dc4d17f..6cd653e5991 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -4,6 +4,7 @@ description: lakeFS comes with its own native CLI client. Here you can see the c
parent: Reference
redirect_from:
- /reference/commands.html
+ - /quickstart/lakefs_cli.html
---
{% comment %}
@@ -40,7 +41,7 @@ lakectl config
# Config file /home/janedoe/.lakectl.yaml will be used
# Access key ID: AKIAIOSFODNN7EXAMPLE
# Secret access key: ****************************************
-# Server endpoint URL: http://localhost:8000/api/v1
+# Server endpoint URL: http://localhost:8000
```
This will setup a `$HOME/.lakectl.yaml` file with the credentials and API endpoint you've supplied.
diff --git a/docs/understand/data_lifecycle_management/data-devenv.md b/docs/understand/data_lifecycle_management/data-devenv.md
index 9b1e0a4e528..0d068ff6aa2 100644
--- a/docs/understand/data_lifecycle_management/data-devenv.md
+++ b/docs/understand/data_lifecycle_management/data-devenv.md
@@ -5,6 +5,7 @@ grand_parent: Understanding lakeFS
description: lakeFS enables a safe test environment on your data lake without the need to copy or mock data
redirect_from:
- /data_lifecycle_management/data-devenv.html
+ - /usecases/data-devenv.html
---
diff --git a/docs/understand/data_lifecycle_management/production.md b/docs/understand/data_lifecycle_management/production.md
index da60285fa59..bfc49c1e3aa 100644
--- a/docs/understand/data_lifecycle_management/production.md
+++ b/docs/understand/data_lifecycle_management/production.md
@@ -5,6 +5,7 @@ grand_parent: Understanding lakeFS
description: lakeFS helps recover from errors and find root case in production.
redirect_from:
- /data_lifecycle_management/production.html
+ - /usecases/production.html
---
## In Production
diff --git a/docs/understand/how/versioning-internals.md b/docs/understand/how/versioning-internals.md
index 07f832e388c..c76ca98395a 100644
--- a/docs/understand/how/versioning-internals.md
+++ b/docs/understand/how/versioning-internals.md
@@ -6,6 +6,7 @@ description: This section explains how versioning works in lakeFS.
redirect_from:
- /understand/architecture/data-model.html
- /understand/understand/data-model.html
+ - /understand/data-model.html
- /understand/versioning-internals.html
---
diff --git a/docs/understand/use_cases/cicd_for_data.md b/docs/understand/use_cases/cicd_for_data.md
index fd7a2f20ef0..3767ef3b7fa 100644
--- a/docs/understand/use_cases/cicd_for_data.md
+++ b/docs/understand/use_cases/cicd_for_data.md
@@ -6,6 +6,8 @@ parent: Use Cases
grand_parent: Understanding lakeFS
redirect_from:
- /use_cases/cicd_for_data.html
+ - /usecases/ci.html
+ - /usecases/cd.html
---
# CI/CD for Data
diff --git a/esti/copy_test.go b/esti/copy_test.go
index d1aeae1614b..53e552f00fb 100644
--- a/esti/copy_test.go
+++ b/esti/copy_test.go
@@ -21,7 +21,6 @@ const (
gsCopyDataPath = "gs://esti-system-testing-data/copy-test-data/"
azureCopyDataPath = "https://esti.blob.core.windows.net/esti-system-testing-data/copy-test-data/"
azureAbortAccount = "esti4multipleaccounts"
- ingestionBranch = "test-data"
largeObject = "squash.tar"
)
@@ -31,7 +30,14 @@ func TestCopyObject(t *testing.T) {
t.Run("copy_large_size_file", func(t *testing.T) {
importPath := getImportPath(t)
- importTestData(t, ctx, client, repo, importPath)
+
+ const ingestionBranch = "test-copy"
+
+ _ = testImportNew(t, ctx, repo, ingestionBranch,
+ []apigen.ImportLocation{{Path: importPath, Type: "common_prefix"}},
+ map[string]string{"created_by": "import"},
+ )
+
res, err := client.StatObjectWithResponse(ctx, repo, ingestionBranch, &apigen.StatObjectParams{
Path: largeObject,
})
@@ -50,7 +56,7 @@ func TestCopyObject(t *testing.T) {
require.NoError(t, err, "failed to copy")
require.NotNil(t, copyResp.JSON201)
- // Verify creation path, date and physical address are different
+ // Verify the creation path, date and physical address are different
copyStat := copyResp.JSON201
require.NotEqual(t, objStat.PhysicalAddress, copyStat.PhysicalAddress)
require.GreaterOrEqual(t, copyStat.Mtime, objStat.Mtime)
@@ -73,7 +79,12 @@ func TestCopyObject(t *testing.T) {
t.Run("copy_large_size_file_abort", func(t *testing.T) {
requireBlockstoreType(t, block.BlockstoreTypeAzure)
importPath := strings.Replace(azureCopyDataPath, "esti", azureAbortAccount, 1)
- importTestData(t, ctx, client, repo, importPath)
+ const ingestionBranch = "test-copy-abort"
+ _ = testImportNew(t, ctx, repo, ingestionBranch,
+ []apigen.ImportLocation{{Path: importPath, Type: "common_prefix"}},
+ map[string]string{"created_by": "import"},
+ )
+
res, err := client.StatObjectWithResponse(ctx, repo, ingestionBranch, &apigen.StatObjectParams{
Path: largeObject,
})
@@ -130,52 +141,3 @@ func getImportPath(t *testing.T) string {
}
return importPath
}
-
-func importTestData(t *testing.T, ctx context.Context, client apigen.ClientWithResponsesInterface, repoName, importPath string) {
- var (
- after = ""
- token *string
- ranges []apigen.RangeMetadata
- )
- for {
- resp, err := client.IngestRangeWithResponse(ctx, repoName, apigen.IngestRangeJSONRequestBody{
- After: after,
- ContinuationToken: token,
- FromSourceURI: importPath,
- })
- require.NoError(t, err, "failed to ingest range")
- require.Equal(t, http.StatusCreated, resp.StatusCode())
- require.NotNil(t, resp.JSON201)
- ranges = append(ranges, *resp.JSON201.Range)
- if !resp.JSON201.Pagination.HasMore {
- break
- }
- after = resp.JSON201.Pagination.LastKey
- token = resp.JSON201.Pagination.ContinuationToken
- }
-
- metarangeResp, err := client.CreateMetaRangeWithResponse(ctx, repoName, apigen.CreateMetaRangeJSONRequestBody{
- Ranges: ranges,
- })
-
- require.NoError(t, err, "failed to create metarange")
- require.NotNil(t, metarangeResp.JSON201)
- require.NotNil(t, metarangeResp.JSON201.Id)
-
- _, err = client.CreateBranchWithResponse(ctx, repoName, apigen.CreateBranchJSONRequestBody{
- Name: ingestionBranch,
- Source: "main",
- })
- require.NoError(t, err, "failed to create branch")
-
- commitResp, err := client.CommitWithResponse(ctx, repoName, ingestionBranch, &apigen.CommitParams{
- SourceMetarange: metarangeResp.JSON201.Id,
- }, apigen.CommitJSONRequestBody{
- Message: "created by import",
- Metadata: &apigen.CommitCreation_Metadata{
- AdditionalProperties: map[string]string{"created_by": "import"},
- },
- })
- require.NoError(t, err, "failed to commit")
- require.NotNil(t, commitResp.JSON201)
-}
diff --git a/esti/gc_utils_test.go b/esti/gc_utils_test.go
index 2d6170b632d..ee7c1e2c80d 100644
--- a/esti/gc_utils_test.go
+++ b/esti/gc_utils_test.go
@@ -8,6 +8,7 @@ import (
"os/exec"
"strings"
+ "github.com/treeverse/lakefs/pkg/api/apiutil"
"github.com/treeverse/lakefs/pkg/logging"
)
@@ -15,7 +16,7 @@ func getSparkSubmitArgs(entryPoint string) []string {
return []string{
"--master", "spark://localhost:7077",
"--conf", "spark.driver.extraJavaOptions=-Divy.cache.dir=/tmp -Divy.home=/tmp",
- "--conf", "spark.hadoop.lakefs.api.url=http://lakefs:8000/api/v1",
+ "--conf", "spark.hadoop.lakefs.api.url=http://lakefs:8000" + apiutil.BaseURL,
"--conf", "spark.hadoop.lakefs.api.access_key=AKIAIOSFDNN7EXAMPLEQ",
"--conf", "spark.hadoop.lakefs.api.secret_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
"--class", entryPoint,
@@ -23,7 +24,8 @@ func getSparkSubmitArgs(entryPoint string) []string {
}
func getDockerArgs(workingDirectory string, localJar string) []string {
- return []string{"run", "--network", "host", "--add-host", "lakefs:127.0.0.1",
+ return []string{
+ "run", "--network", "host", "--add-host", "lakefs:127.0.0.1",
"-v", fmt.Sprintf("%s/ivy:/opt/bitnami/spark/.ivy2", workingDirectory),
"-v", fmt.Sprintf("%s:/opt/metaclient/client.jar", localJar),
"--rm",
diff --git a/esti/golden/lakectl_doctor_wrong_endpoint.golden b/esti/golden/lakectl_doctor_wrong_endpoint.golden
index 0edd58e9f07..d556b15d47d 100644
--- a/esti/golden/lakectl_doctor_wrong_endpoint.golden
+++ b/esti/golden/lakectl_doctor_wrong_endpoint.golden
@@ -1,3 +1,3 @@
It looks like endpoint url is wrong.
-Suspicious URI format for server.endpoint_url: http://${HOST}/api/v11
+Suspicious URI format for server.endpoint_url: ${HOST}/api/v11
diff --git a/esti/golden/lakectl_doctor_wrong_endpoint_verbose.golden b/esti/golden/lakectl_doctor_wrong_endpoint_verbose.golden
index ceee34e890d..47accb7696d 100644
--- a/esti/golden/lakectl_doctor_wrong_endpoint_verbose.golden
+++ b/esti/golden/lakectl_doctor_wrong_endpoint_verbose.golden
@@ -9,4 +9,4 @@ Couldn't find a problem with access key format.
Trying to validate secret access key format.
Couldn't find a problem with secret access key format.
Trying to validate endpoint URL format.
-Suspicious URI format for server.endpoint_url: http://${HOST}/api/v11
+Suspicious URI format for server.endpoint_url: ${HOST}/api/v11
diff --git a/esti/import_test.go b/esti/import_test.go
index af9e3c09601..b1622ba69b2 100644
--- a/esti/import_test.go
+++ b/esti/import_test.go
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"net/http"
- "net/url"
"os"
"path/filepath"
"strconv"
@@ -12,11 +11,9 @@ import (
"testing"
"time"
- "github.com/rs/xid"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
"github.com/treeverse/lakefs/pkg/api/apigen"
- "github.com/treeverse/lakefs/pkg/api/apiutil"
"github.com/treeverse/lakefs/pkg/block"
"github.com/treeverse/lakefs/pkg/catalog"
"github.com/treeverse/lakefs/pkg/config"
@@ -31,7 +28,6 @@ const (
azureImportPath = "https://esti.blob.core.windows.net/esti-system-testing-data/import-test-data/"
importTargetPrefix = "imported/new-prefix/"
importBranchBase = "ingestion"
- adlsTestImportPath = "import-test-cases"
)
var importFilesToCheck = []string{
@@ -73,29 +69,6 @@ func setupImportByBlockstoreType(t testing.TB) (string, string, int) {
return blockstoreType, importPath, expectedContentLength
}
-func TestImport(t *testing.T) {
- ctx, _, repoName := setupTest(t)
- defer tearDownTest(repoName)
- blockstoreType, importPath, expectedContentLength := setupImportByBlockstoreType(t)
-
- t.Run("default", func(t *testing.T) {
- importBranch := fmt.Sprintf("%s-%s", importBranchBase, "default")
- testImport(t, ctx, repoName, importPath, importBranch)
- verifyImportObjects(t, ctx, repoName, importTargetPrefix, importBranch, importFilesToCheck, expectedContentLength)
- })
-
- t.Run("parent", func(t *testing.T) {
- importBranch := fmt.Sprintf("%s-%s", importBranchBase, "parent")
- if blockstoreType == block.BlockstoreTypeLocal {
- t.Skip("local always assumes import path is dir")
- }
- // import without the directory separator as suffix to include the parent directory
- importPathParent := strings.TrimSuffix(importPath, "/")
- testImport(t, ctx, repoName, importPathParent, importBranch)
- verifyImportObjects(t, ctx, repoName, importTargetPrefix+"import-test-data/", importBranch, importFilesToCheck, expectedContentLength)
- })
-}
-
func setupLocalImportPath(t testing.TB) string {
const dirPerm = 0o755
importDir := filepath.Join(t.TempDir(), "import-test-data") + "/"
@@ -163,142 +136,7 @@ func verifyImportObjects(t testing.TB, ctx context.Context, repoName, prefix, im
t.Log("Total objects imported:", count)
}
-func ingestRange(t testing.TB, ctx context.Context, repoName, importPath string) ([]apigen.RangeMetadata, string) {
- var (
- after string
- token *string
- ranges []apigen.RangeMetadata
- stagingToken string
- )
- for {
- resp, err := client.IngestRangeWithResponse(ctx, repoName, apigen.IngestRangeJSONRequestBody{
- After: after,
- ContinuationToken: token,
- FromSourceURI: importPath,
- Prepend: importTargetPrefix,
- })
- require.NoError(t, err, "failed to ingest range")
- require.Equal(t, http.StatusCreated, resp.StatusCode())
- require.NotNil(t, resp.JSON201)
- ranges = append(ranges, *resp.JSON201.Range)
- stagingToken = apiutil.Value(resp.JSON201.Pagination.StagingToken)
- if !resp.JSON201.Pagination.HasMore {
- break
- }
- after = resp.JSON201.Pagination.LastKey
- token = resp.JSON201.Pagination.ContinuationToken
- }
- return ranges, stagingToken
-}
-
-func testImport(t testing.TB, ctx context.Context, repoName, importPath, importBranch string) {
- ranges, stagingToken := ingestRange(t, ctx, repoName, importPath)
-
- metarangeResp, err := client.CreateMetaRangeWithResponse(ctx, repoName, apigen.CreateMetaRangeJSONRequestBody{
- Ranges: ranges,
- })
-
- require.NoError(t, err, "failed to create metarange")
- require.Equal(t, http.StatusCreated, metarangeResp.StatusCode())
- require.NotNil(t, metarangeResp.JSON201.Id, "failed to create metarange")
-
- createResp, err := client.CreateBranchWithResponse(ctx, repoName, apigen.CreateBranchJSONRequestBody{
- Name: importBranch,
- Source: "main",
- })
- require.NoError(t, err, "failed to create branch", importBranch)
- require.Equal(t, http.StatusCreated, createResp.StatusCode(), "failed to create branch", importBranch)
-
- commitResp, err := client.CommitWithResponse(ctx, repoName, importBranch, &apigen.CommitParams{
- SourceMetarange: metarangeResp.JSON201.Id,
- }, apigen.CommitJSONRequestBody{
- Message: "created by import",
- Metadata: &apigen.CommitCreation_Metadata{
- AdditionalProperties: map[string]string{"created_by": "import"},
- },
- })
- require.NoError(t, err, "failed to commit")
- require.Equal(t, http.StatusCreated, commitResp.StatusCode(), "failed to commit")
-
- if stagingToken != "" {
- stageResp, err := client.UpdateBranchTokenWithResponse(ctx, repoName, importBranch, apigen.UpdateBranchTokenJSONRequestBody{StagingToken: stagingToken})
- require.NoError(t, err, "failed to change branch token")
- require.Equal(t, http.StatusNoContent, stageResp.StatusCode(), "failed to change branch token")
-
- commitResp, err = client.CommitWithResponse(ctx, repoName, importBranch, &apigen.CommitParams{}, apigen.CommitJSONRequestBody{
- Message: "created by import on skipped objects",
- Metadata: &apigen.CommitCreation_Metadata{
- AdditionalProperties: map[string]string{"created_by": "import"},
- },
- })
- require.NoError(t, err, "failed to commit")
- require.Equal(t, http.StatusCreated, commitResp.StatusCode(), "failed to commit")
- }
-}
-
-func TestAzureDataLakeV2(t *testing.T) {
- importPrefix := viper.GetString("adls_import_base_url")
- if importPrefix == "" {
- t.Skip("No Azure data lake storage path prefix was given")
- }
-
- ctx, _, repoName := setupTest(t)
- defer tearDownTest(repoName)
-
- tests := []struct {
- name string
- prefix string
- filesToCheck []string
- }{
- {
- name: "import-test-data",
- prefix: "",
- filesToCheck: importFilesToCheck,
- },
- {
- name: "empty-folders",
- prefix: adlsTestImportPath,
- filesToCheck: []string{},
- },
- {
- name: "prefix-item-order",
- prefix: adlsTestImportPath,
- filesToCheck: []string{
- "aaa",
- "helloworld.csv",
- "zero",
- "helloworld/myfile.csv",
- },
- },
- //{ // Use this configuration to run import on big dataset of ~620,000 objects
- // name: "adls-big-import",
- // prefix: "",
- // filesToCheck: []string{},
- //},
- }
-
- for _, tt := range tests {
- importBranch := fmt.Sprintf("%s-%s", importBranchBase, tt.name)
- // each test is a folder under the prefix import
- t.Run(tt.name, func(t *testing.T) {
- importPath, err := url.JoinPath(importPrefix, tt.prefix, tt.name)
- if err != nil {
- t.Fatal("Import URL", err)
- }
- testImport(t, ctx, repoName, importPath, importBranch)
- if len(tt.filesToCheck) == 0 {
- resp, err := client.ListObjectsWithResponse(ctx, repoName, importBranch, &apigen.ListObjectsParams{})
- require.NoError(t, err)
- require.NotNil(t, resp.JSON200)
- require.Empty(t, resp.JSON200.Results)
- } else {
- verifyImportObjects(t, ctx, repoName, filepath.Join(importTargetPrefix, tt.name)+"/", importBranch, tt.filesToCheck, 0)
- }
- })
- }
-}
-
-func TestImportNew(t *testing.T) {
+func TestImport(t *testing.T) {
blockstoreType, importPath, expectedContentLength := setupImportByBlockstoreType(t)
metadata := map[string]string{"created_by": "import"}
@@ -354,7 +192,7 @@ func TestImportNew(t *testing.T) {
Path: importPathParent,
Type: catalog.ImportPathTypePrefix,
}}
- _ = testImportNew(t, ctx, repoName, branch, paths, &metadata)
+ _ = testImportNew(t, ctx, repoName, branch, paths, metadata)
verifyImportObjects(t, ctx, repoName, importTargetPrefix+"import-test-data/", branch, importFilesToCheck, expectedContentLength)
})
@@ -377,7 +215,7 @@ func TestImportNew(t *testing.T) {
Type: catalog.ImportPathTypePrefix,
})
- _ = testImportNew(t, ctx, repoName, branch, paths, &metadata)
+ _ = testImportNew(t, ctx, repoName, branch, paths, metadata)
verifyImportObjects(t, ctx, repoName, importTargetPrefix, branch, importFilesToCheck, expectedContentLength)
})
@@ -408,7 +246,7 @@ func TestImportNew(t *testing.T) {
})
}
-func testImportNew(t testing.TB, ctx context.Context, repoName, importBranch string, paths []apigen.ImportLocation, metadata *map[string]string) string {
+func testImportNew(t testing.TB, ctx context.Context, repoName, importBranch string, paths []apigen.ImportLocation, metadata map[string]string) string {
createResp, err := client.CreateBranchWithResponse(ctx, repoName, apigen.CreateBranchJSONRequestBody{
Name: importBranch,
Source: "main",
@@ -422,12 +260,13 @@ func testImportNew(t testing.TB, ctx context.Context, repoName, importBranch str
},
Paths: paths,
}
- if metadata != nil {
- body.Commit.Metadata = &apigen.CommitCreation_Metadata{AdditionalProperties: *metadata}
+ if len(metadata) > 0 {
+ body.Commit.Metadata = &apigen.CommitCreation_Metadata{AdditionalProperties: metadata}
}
importResp, err := client.ImportStartWithResponse(ctx, repoName, importBranch, body)
- require.NotNil(t, importResp.JSON202, "failed to start import", err)
+ require.NoError(t, err, "failed to start import", importBranch)
+ require.NotNil(t, importResp.JSON202, "failed to start import", importResp.Status())
require.NotNil(t, importResp.JSON202.Id, "missing import ID")
var (
@@ -435,29 +274,26 @@ func testImportNew(t testing.TB, ctx context.Context, repoName, importBranch str
updateTime time.Time
)
importID := importResp.JSON202.Id
- ticker := time.NewTicker(5 * time.Second)
+
+ ticker := time.NewTicker(2 * time.Second)
defer ticker.Stop()
- for {
- select {
- case <-ctx.Done():
- t.Fatalf("context canceled")
- case <-ticker.C:
- statusResp, err = client.ImportStatusWithResponse(ctx, repoName, importBranch, &apigen.ImportStatusParams{
- Id: importID,
- })
- require.NoError(t, err)
- require.NotNil(t, statusResp.JSON200, "failed to get import status", err)
- status := statusResp.JSON200
- require.Nil(t, status.Error)
- require.NotEqual(t, updateTime, status.UpdateTime)
- updateTime = status.UpdateTime
- t.Log("Import progress:", *status.IngestedObjects, importID)
- }
- if statusResp.JSON200.Completed {
- t.Log("Import completed:", importID)
- return importID
+ for range ticker.C {
+ statusResp, err = client.ImportStatusWithResponse(ctx, repoName, importBranch, &apigen.ImportStatusParams{
+ Id: importID,
+ })
+ require.NoError(t, err, "failed to get import status", importID)
+ require.NotNil(t, statusResp.JSON200, "failed to get import status", err)
+ status := statusResp.JSON200
+ require.Nil(t, status.Error, "import failed", err)
+ require.NotEqual(t, updateTime, status.UpdateTime)
+ updateTime = status.UpdateTime
+ t.Log("Import progress:", *status.IngestedObjects, importID)
+ if status.Completed {
+ break
}
}
+ t.Log("Import completed:", importID)
+ return importID
}
func TestImportCancel(t *testing.T) {
@@ -514,68 +350,3 @@ func TestImportCancel(t *testing.T) {
timer.Reset(3 * time.Second) // Server updates status every 1 second - unless operation was canceled successfully
}
}
-
-// #####################################################################################################################
-// # #
-// # BENCHMARKS #
-// # #
-// #####################################################################################################################
-func BenchmarkIngest_Azure(b *testing.B) {
- requireBlockstoreType(b, block.BlockstoreTypeAzure)
- ctx, _, repoName := setupTest(b)
- defer tearDownTest(repoName)
-
- b.Run("alds_gen2_ingest", func(b *testing.B) {
- importPrefix := viper.GetString("adls_import_base_url")
- if importPrefix == "" {
- b.Skip("No Azure data lake storage path prefix was given")
- }
- importPath, err := url.JoinPath(importPrefix, "import-test-data/")
- if err != nil {
- b.Fatal("Import URL", err)
- }
- benchmarkIngest(b, ctx, repoName, importPath)
- })
-
- b.Run("blob_storage_ingest", func(b *testing.B) {
- benchmarkIngest(b, ctx, repoName, azureImportPath)
- })
-}
-
-func benchmarkIngest(b *testing.B, ctx context.Context, repoName, importPath string) {
- b.ResetTimer()
- for n := 0; n < b.N; n++ {
- ingestRange(b, ctx, repoName, importPath)
- }
-}
-
-func BenchmarkImport_Azure(b *testing.B) {
- requireBlockstoreType(b, block.BlockstoreTypeAzure)
- ctx, _, repoName := setupTest(b)
- defer tearDownTest(repoName)
-
- b.Run("alds_gen2_import", func(b *testing.B) {
- importPrefix := viper.GetString("adls_import_base_url")
- if importPrefix == "" {
- b.Skip("No Azure data lake storage path prefix was given")
- }
- importBranch := fmt.Sprintf("%s-%s", importBranchBase, makeRepositoryName(b.Name()))
- importPath, err := url.JoinPath(importPrefix, "import-test-data/")
- if err != nil {
- b.Fatal("Import URL", err)
- }
- benchmarkImport(b, ctx, repoName, importPath, importBranch)
- })
-
- b.Run("blob_storage_import", func(b *testing.B) {
- importBranch := fmt.Sprintf("%s-%s", importBranchBase, makeRepositoryName(b.Name()))
- benchmarkImport(b, ctx, repoName, azureImportPath, importBranch)
- })
-}
-
-func benchmarkImport(b *testing.B, ctx context.Context, repoName, importPath, importBranch string) {
- b.ResetTimer()
- for n := 0; n < b.N; n++ {
- testImport(b, ctx, repoName, importPath, fmt.Sprintf("%s-%s", importBranch, xid.New().String()))
- }
-}
diff --git a/esti/lakectl_doctor_test.go b/esti/lakectl_doctor_test.go
index 007398549cf..94f4416846f 100644
--- a/esti/lakectl_doctor_test.go
+++ b/esti/lakectl_doctor_test.go
@@ -1,22 +1,24 @@
package esti
import (
+ "fmt"
"net/url"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
+ "github.com/treeverse/lakefs/pkg/api/apiutil"
)
func TestLakectlDoctor(t *testing.T) {
accessKeyID := viper.GetString("access_key_id")
secretAccessKey := viper.GetString("secret_access_key")
- endPointURL := viper.GetString("endpoint_url") + "/api/v1"
+ endPointURL := viper.GetString("endpoint_url") + apiutil.BaseURL
u, err := url.Parse(endpointURL)
require.NoError(t, err)
vars := map[string]string{
"LAKEFS_ENDPOINT": endPointURL,
- "HOST": u.Host,
+ "HOST": fmt.Sprintf("%s://%s", u.Scheme, u.Host),
}
RunCmdAndVerifySuccessWithFile(t, LakectlWithParams(accessKeyID, secretAccessKey, endPointURL)+" doctor", false, "lakectl_doctor_ok", vars)
diff --git a/pkg/actions/lua.go b/pkg/actions/lua.go
index e290bbc76d0..383813b2725 100644
--- a/pkg/actions/lua.go
+++ b/pkg/actions/lua.go
@@ -14,6 +14,7 @@ import (
lualibs "github.com/treeverse/lakefs/pkg/actions/lua"
"github.com/treeverse/lakefs/pkg/actions/lua/lakefs"
luautil "github.com/treeverse/lakefs/pkg/actions/lua/util"
+ "github.com/treeverse/lakefs/pkg/api/apiutil"
"github.com/treeverse/lakefs/pkg/auth"
"github.com/treeverse/lakefs/pkg/auth/model"
"github.com/treeverse/lakefs/pkg/graveler"
@@ -95,8 +96,11 @@ func (h *LuaHook) Run(ctx context.Context, record graveler.HookRecord, buf *byte
if h.Endpoint == nil {
return fmt.Errorf("no endpoint configured, cannot request object: %s: %w", h.ScriptPath, ErrInvalidAction)
}
- reqURL := fmt.Sprintf("/api/v1/repositories/%s/refs/%s/objects",
- url.PathEscape(string(record.RepositoryID)), url.PathEscape(string(record.SourceRef)))
+ reqURL, err := url.JoinPath(apiutil.BaseURL,
+ "repositories", string(record.RepositoryID), "refs", string(record.SourceRef), "objects")
+ if err != nil {
+ return err
+ }
req, err := http.NewRequest(http.MethodGet, reqURL, nil)
if err != nil {
return err
diff --git a/pkg/actions/lua/lakefs/client.go b/pkg/actions/lua/lakefs/client.go
index 9eb3f52d377..acc0df3334c 100644
--- a/pkg/actions/lua/lakefs/client.go
+++ b/pkg/actions/lua/lakefs/client.go
@@ -14,6 +14,7 @@ import (
"github.com/Shopify/go-lua"
"github.com/go-chi/chi/v5"
"github.com/treeverse/lakefs/pkg/actions/lua/util"
+ "github.com/treeverse/lakefs/pkg/api/apiutil"
"github.com/treeverse/lakefs/pkg/auth"
"github.com/treeverse/lakefs/pkg/auth/model"
"github.com/treeverse/lakefs/pkg/version"
@@ -29,14 +30,15 @@ func check(l *lua.State, err error) {
}
}
-func newLakeFSRequest(ctx context.Context, user *model.User, method, url string, data []byte) (*http.Request, error) {
- if !strings.HasPrefix(url, "/api/") {
- if strings.HasPrefix(url, "/") {
- url = fmt.Sprintf("/api/v1%s", url)
- } else {
- url = fmt.Sprintf("/api/v1/%s", url)
+func newLakeFSRequest(ctx context.Context, user *model.User, method, reqURL string, data []byte) (*http.Request, error) {
+ if !strings.HasPrefix(reqURL, "/api/") {
+ var err error
+ reqURL, err = url.JoinPath(apiutil.BaseURL, reqURL)
+ if err != nil {
+ return nil, err
}
}
+
var body io.Reader
if data == nil {
body = bytes.NewReader(data)
@@ -47,7 +49,7 @@ func newLakeFSRequest(ctx context.Context, user *model.User, method, url string,
ctx = context.WithValue(ctx, chi.RouteCtxKey, nil)
// Add user to the request context
ctx = auth.WithUser(ctx, user)
- req, err := http.NewRequestWithContext(ctx, method, url, body)
+ req, err := http.NewRequestWithContext(ctx, method, reqURL, body)
if err != nil {
return nil, err
}
@@ -55,8 +57,8 @@ func newLakeFSRequest(ctx context.Context, user *model.User, method, url string,
return req, nil
}
-func newLakeFSJSONRequest(ctx context.Context, user *model.User, method, url string, data []byte) (*http.Request, error) {
- req, err := newLakeFSRequest(ctx, user, method, url, data)
+func newLakeFSJSONRequest(ctx context.Context, user *model.User, method, reqURL string, data []byte) (*http.Request, error) {
+ req, err := newLakeFSRequest(ctx, user, method, reqURL, data)
if err != nil {
return nil, err
}
@@ -86,9 +88,11 @@ func OpenClient(l *lua.State, ctx context.Context, user *model.User, server *htt
if err != nil {
check(l, err)
}
-
- path := fmt.Sprintf("/repositories/%s/tags", url.PathEscape(repo))
- req, err := newLakeFSJSONRequest(ctx, user, http.MethodPost, path, data)
+ reqURL, err := url.JoinPath("/repositories", repo, "tags")
+ if err != nil {
+ check(l, err)
+ }
+ req, err := newLakeFSJSONRequest(ctx, user, http.MethodPost, reqURL, data)
if err != nil {
check(l, err)
}
@@ -98,7 +102,10 @@ func OpenClient(l *lua.State, ctx context.Context, user *model.User, server *htt
repo := lua.CheckString(l, 1)
leftRef := lua.CheckString(l, 2)
rightRef := lua.CheckString(l, 3)
- reqURL := fmt.Sprintf("/repositories/%s/refs/%s/diff/%s", url.PathEscape(repo), url.PathEscape(leftRef), url.PathEscape(rightRef))
+ reqURL, err := url.JoinPath("/repositories", repo, "refs", leftRef, "diff", rightRef)
+ if err != nil {
+ check(l, err)
+ }
req, err := newLakeFSJSONRequest(ctx, user, http.MethodGet, reqURL, nil)
if err != nil {
check(l, err)
@@ -123,7 +130,10 @@ func OpenClient(l *lua.State, ctx context.Context, user *model.User, server *htt
{Name: "list_objects", Function: func(state *lua.State) int {
repo := lua.CheckString(l, 1)
ref := lua.CheckString(l, 2)
- reqURL := fmt.Sprintf("/repositories/%s/refs/%s/objects/ls", url.PathEscape(repo), url.PathEscape(ref))
+ reqURL, err := url.JoinPath("/repositories", repo, "refs", ref, "objects/ls")
+ if err != nil {
+ check(l, err)
+ }
req, err := newLakeFSJSONRequest(ctx, user, http.MethodGet, reqURL, nil)
if err != nil {
check(l, err)
@@ -155,7 +165,10 @@ func OpenClient(l *lua.State, ctx context.Context, user *model.User, server *htt
{Name: "get_object", Function: func(state *lua.State) int {
repo := lua.CheckString(l, 1)
ref := lua.CheckString(l, 2)
- reqURL := fmt.Sprintf("/repositories/%s/refs/%s/objects", url.PathEscape(repo), url.PathEscape(ref))
+ reqURL, err := url.JoinPath("/repositories", repo, "refs", ref, "objects")
+ if err != nil {
+ check(l, err)
+ }
req, err := newLakeFSJSONRequest(ctx, user, http.MethodGet, reqURL, nil)
if err != nil {
check(l, err)
@@ -173,7 +186,10 @@ func OpenClient(l *lua.State, ctx context.Context, user *model.User, server *htt
{Name: "diff_branch", Function: func(state *lua.State) int {
repo := lua.CheckString(l, 1)
branch := lua.CheckString(l, 2)
- reqURL := fmt.Sprintf("/repositories/%s/branches/%s/diff", url.PathEscape(repo), url.PathEscape(branch))
+ reqURL, err := url.JoinPath("/repositories", repo, "branches", branch, "diff")
+ if err != nil {
+ check(l, err)
+ }
req, err := newLakeFSJSONRequest(ctx, user, http.MethodGet, reqURL, nil)
if err != nil {
check(l, err)
diff --git a/pkg/api/controller.go b/pkg/api/controller.go
index 30eae107cc4..d9f995a2f12 100644
--- a/pkg/api/controller.go
+++ b/pkg/api/controller.go
@@ -62,8 +62,6 @@ const (
DefaultMaxDeleteObjects = 1000
- DefaultResetPasswordExpiration = 20 * time.Minute
-
// httpStatusClientClosedRequest used as internal status code when request context is cancelled
httpStatusClientClosedRequest = 499
// httpStatusClientClosedRequestText text used for client closed request status code
@@ -705,7 +703,6 @@ func (c *Controller) ListGroupMembers(w http.ResponseWriter, r *http.Request, gr
response.Results = append(response.Results, apigen.User{
Id: u.Username,
CreationDate: u.CreatedAt.Unix(),
- Email: u.Email,
})
}
writeResponse(w, r, http.StatusOK, response)
@@ -1065,18 +1062,11 @@ func (c *Controller) ListUsers(w http.ResponseWriter, r *http.Request, params ap
response.Results = append(response.Results, apigen.User{
Id: u.Username,
CreationDate: u.CreatedAt.Unix(),
- Email: u.Email,
})
}
writeResponse(w, r, http.StatusOK, response)
}
-func (c *Controller) generateResetPasswordToken(email string, duration time.Duration) (string, error) {
- secret := c.Auth.SecretStore().SharedSecret()
- currentTime := time.Now()
- return auth.GenerateJWTResetPassword(secret, email, currentTime, currentTime.Add(duration))
-}
-
func (c *Controller) CreateUser(w http.ResponseWriter, r *http.Request, body apigen.CreateUserJSONRequestBody) {
invite := swag.BoolValue(body.InviteUser)
username := body.Id
@@ -2250,8 +2240,8 @@ func (c *Controller) ImportStart(w http.ResponseWriter, r *http.Request, body ap
})
}
-func importStatusToResponse(status *graveler.ImportStatus) apigen.ImportStatusResp {
- resp := apigen.ImportStatusResp{
+func importStatusToResponse(status *graveler.ImportStatus) apigen.ImportStatus {
+ resp := apigen.ImportStatus{
Completed: status.Completed,
IngestedObjects: &status.Progress,
UpdateTime: status.UpdatedAt,
@@ -2319,111 +2309,6 @@ func (c *Controller) ImportCancel(w http.ResponseWriter, r *http.Request, reposi
writeResponse(w, r, http.StatusNoContent, nil)
}
-func (c *Controller) IngestRange(w http.ResponseWriter, r *http.Request, body apigen.IngestRangeJSONRequestBody, repository string) {
- if !c.authorize(w, r, permissions.Node{
- Type: permissions.NodeTypeAnd,
- Nodes: []permissions.Node{
- {
- Permission: permissions.Permission{
- Action: permissions.ImportFromStorageAction,
- Resource: permissions.StorageNamespace(body.FromSourceURI),
- },
- },
- {
- Permission: permissions.Permission{
- Action: permissions.WriteObjectAction,
- Resource: permissions.ObjectArn(repository, body.Prepend),
- },
- },
- },
- }) {
- return
- }
-
- ctx := r.Context()
- c.LogAction(ctx, "ingest_range", r, repository, "", "")
-
- contToken := swag.StringValue(body.ContinuationToken)
- stagingToken := swag.StringValue(body.StagingToken)
- info, mark, err := c.Catalog.WriteRange(r.Context(), repository, catalog.WriteRangeRequest{
- SourceURI: body.FromSourceURI,
- Prepend: body.Prepend,
- After: body.After,
- StagingToken: stagingToken,
- ContinuationToken: contToken,
- })
- if c.handleAPIError(ctx, w, r, err) {
- return
- }
-
- writeResponse(w, r, http.StatusCreated, apigen.IngestRangeCreationResponse{
- Range: &apigen.RangeMetadata{
- Id: string(info.ID),
- MinKey: string(info.MinKey),
- MaxKey: string(info.MaxKey),
- Count: info.Count,
- EstimatedSize: int(info.EstimatedRangeSizeBytes),
- },
- Pagination: &apigen.ImportPagination{
- HasMore: mark.HasMore,
- ContinuationToken: &mark.ContinuationToken,
- LastKey: mark.LastKey,
- StagingToken: &mark.StagingToken,
- },
- })
-}
-
-func (c *Controller) CreateMetaRange(w http.ResponseWriter, r *http.Request, body apigen.CreateMetaRangeJSONRequestBody, repository string) {
- if !c.authorize(w, r, permissions.Node{
- Permission: permissions.Permission{
- Action: permissions.CreateMetaRangeAction,
- Resource: permissions.RepoArn(repository),
- },
- }) {
- return
- }
-
- ctx := r.Context()
- c.LogAction(ctx, "create_metarange", r, repository, "", "")
-
- ranges := make([]*graveler.RangeInfo, 0, len(body.Ranges))
- for _, r := range body.Ranges {
- ranges = append(ranges, &graveler.RangeInfo{
- ID: graveler.RangeID(r.Id),
- MinKey: graveler.Key(r.MinKey),
- MaxKey: graveler.Key(r.MaxKey),
- Count: r.Count,
- EstimatedRangeSizeBytes: uint64(r.EstimatedSize),
- })
- }
- info, err := c.Catalog.WriteMetaRange(r.Context(), repository, ranges)
- if c.handleAPIError(ctx, w, r, err) {
- return
- }
- writeResponse(w, r, http.StatusCreated, apigen.MetaRangeCreationResponse{
- Id: swag.String(string(info.ID)),
- })
-}
-
-func (c *Controller) UpdateBranchToken(w http.ResponseWriter, r *http.Request, body apigen.UpdateBranchTokenJSONRequestBody, repository, branch string) {
- if !c.authorize(w, r, permissions.Node{
- Permission: permissions.Permission{
- Action: permissions.WriteObjectAction,
- // This API writes an entire staging area to a branch and therefore requires permission to write to the entire repository space
- Resource: permissions.ObjectArn(repository, "*"),
- },
- }) {
- return
- }
- ctx := r.Context()
- c.LogAction(ctx, "update_branch_token", r, repository, branch, "")
- err := c.Catalog.UpdateBranchToken(ctx, repository, branch, body.StagingToken)
- if c.handleAPIError(ctx, w, r, err) {
- return
- }
- writeResponse(w, r, http.StatusNoContent, nil)
-}
-
func (c *Controller) Commit(w http.ResponseWriter, r *http.Request, body apigen.CommitJSONRequestBody, repository, branch string, params apigen.CommitParams) {
if !c.authorize(w, r, permissions.Node{
Permission: permissions.Permission{
@@ -4232,74 +4117,6 @@ func (c *Controller) GetCurrentUser(w http.ResponseWriter, r *http.Request) {
writeResponse(w, r, http.StatusOK, response)
}
-func (c *Controller) resetPasswordRequest(ctx context.Context, emailAddr string) error {
- user, err := c.Auth.GetUserByEmail(ctx, emailAddr)
- if err != nil {
- return err
- }
- emailAddr = swag.StringValue(user.Email)
- token, err := c.generateResetPasswordToken(emailAddr, DefaultResetPasswordExpiration)
- if err != nil {
- c.Logger.WithError(err).WithField("email_address", emailAddr).Error("reset password - failed generating token")
- return err
- }
- params := map[string]string{
- "token": token,
- }
- err = c.Emailer.SendResetPasswordEmail([]string{emailAddr}, params)
- if err != nil {
- c.Logger.WithError(err).WithField("email_address", emailAddr).Error("reset password - failed sending email")
- return err
- }
- c.Logger.WithField("email", emailAddr).Info("reset password email sent")
- return nil
-}
-
-func (c *Controller) ForgotPassword(w http.ResponseWriter, r *http.Request, body apigen.ForgotPasswordJSONRequestBody) {
- addr, err := mail.ParseAddress(body.Email)
- if err != nil {
- writeError(w, r, http.StatusBadRequest, "invalid email")
- return
- }
- err = c.resetPasswordRequest(r.Context(), addr.Address)
- if err != nil {
- c.Logger.WithError(err).WithField("email", body.Email).Debug("failed sending reset password email")
- }
- writeResponse(w, r, http.StatusNoContent, nil)
-}
-
-func (c *Controller) UpdatePassword(w http.ResponseWriter, r *http.Request, body apigen.UpdatePasswordJSONRequestBody) {
- claims, err := VerifyResetPasswordToken(r.Context(), c.Auth, body.Token)
- if err != nil {
- c.Logger.WithError(err).WithField("token", body.Token).Debug("failed to verify token")
- writeError(w, r, http.StatusUnauthorized, ErrAuthenticatingRequest)
- return
- }
-
- // verify provided email matched the token
- requestEmail := swag.StringValue(body.Email)
- if requestEmail != "" && requestEmail != claims.Subject {
- c.Logger.WithError(err).WithFields(logging.Fields{
- "token": body.Token,
- "request_email": requestEmail,
- }).Debug("requested email doesn't match the email provided in verified token")
- }
-
- user, err := c.Auth.GetUserByEmail(r.Context(), claims.Subject)
- if err != nil {
- c.Logger.WithError(err).WithField("email", claims.Subject).Warn("failed to retrieve user by email")
- writeError(w, r, http.StatusNotFound, http.StatusText(http.StatusNotFound))
- return
- }
- err = c.Auth.HashAndUpdatePassword(r.Context(), user.Username, body.NewPassword)
- if err != nil {
- c.Logger.WithError(err).WithField("username", user.Username).Debug("failed to update password")
- writeError(w, r, http.StatusInternalServerError, http.StatusText(http.StatusInternalServerError))
- return
- }
- writeResponse(w, r, http.StatusCreated, nil)
-}
-
func (c *Controller) GetLakeFSVersion(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
_, err := auth.GetUser(ctx)
diff --git a/pkg/api/controller_test.go b/pkg/api/controller_test.go
index af787ff2bd1..5800c94d04c 100644
--- a/pkg/api/controller_test.go
+++ b/pkg/api/controller_test.go
@@ -35,7 +35,6 @@ import (
"github.com/treeverse/lakefs/pkg/auth"
"github.com/treeverse/lakefs/pkg/block"
"github.com/treeverse/lakefs/pkg/catalog"
- "github.com/treeverse/lakefs/pkg/catalog/testutils"
"github.com/treeverse/lakefs/pkg/config"
"github.com/treeverse/lakefs/pkg/graveler"
"github.com/treeverse/lakefs/pkg/httputil"
@@ -147,8 +146,8 @@ func TestController_ListRepositoriesHandler(t *testing.T) {
t.Run("paginate repos after", func(t *testing.T) {
// write some repos
resp, err := clt.ListRepositoriesWithResponse(ctx, &apigen.ListRepositoriesParams{
- After: apiutil.Ptr(apigen.PaginationAfter("foo2")),
- Amount: apiutil.Ptr(apigen.PaginationAmount(2)),
+ After: apiutil.Ptr[apigen.PaginationAfter]("foo2"),
+ Amount: apiutil.Ptr[apigen.PaginationAmount](2),
})
if err != nil {
t.Fatal(err)
@@ -1194,7 +1193,7 @@ func TestController_ListBranchesHandler(t *testing.T) {
_, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, "foo2"), "main")
testutil.Must(t, err)
- // create first dummy commit on main so that we can create branches from it
+ // create the first "dummy" commit on main so that we can create branches from it
testutil.Must(t, deps.catalog.CreateEntry(ctx, repo, "main", catalog.DBEntry{Path: "a/b"}))
_, err = deps.catalog.Commit(ctx, repo, "main", "first commit", "test", nil, nil, nil)
testutil.Must(t, err)
@@ -1213,8 +1212,8 @@ func TestController_ListBranchesHandler(t *testing.T) {
}
resp, err = clt.ListBranchesWithResponse(ctx, repo, &apigen.ListBranchesParams{
- After: apiutil.Ptr(apigen.PaginationAfter("main1")),
- Amount: apiutil.Ptr(apigen.PaginationAmount(2)),
+ After: apiutil.Ptr[apigen.PaginationAfter]("main1"),
+ Amount: apiutil.Ptr[apigen.PaginationAmount](2),
})
verifyResponseOK(t, resp, err)
results := resp.JSON200.Results
@@ -1334,7 +1333,7 @@ func TestController_GetBranchHandler(t *testing.T) {
testutil.Must(t, err)
t.Run("get default branch", func(t *testing.T) {
- // create first dummy commit on main so that we can create branches from it
+ // create the first "dummy" commit on main so that we can create branches from it
testutil.Must(t, deps.catalog.CreateEntry(ctx, repo, testBranch, catalog.DBEntry{Path: "a/b"}))
_, err = deps.catalog.Commit(ctx, repo, testBranch, "first commit", "test", nil, nil, nil)
testutil.Must(t, err)
@@ -1819,194 +1818,6 @@ func TestController_DeleteBranchHandler(t *testing.T) {
})
}
-func TestController_IngestRangeHandler(t *testing.T) {
- const (
- fromSourceURI = "https://valid.uri"
- uriPrefix = "take/from/here"
- fromSourceURIWithPrefix = fromSourceURI + "/" + uriPrefix
- after = "some/key/to/start/after"
- prepend = "some/logical/prefix"
- )
-
- const continuationToken = "opaque"
-
- t.Run("ingest directory marker", func(t *testing.T) {
- ctx := context.Background()
- w := testutils.NewFakeWalker(0, 1, uriPrefix, after, continuationToken, fromSourceURIWithPrefix, nil)
- w.Entries = []block.ObjectStoreEntry{
- {
- RelativeKey: "",
- FullKey: uriPrefix + "/",
- Address: fromSourceURIWithPrefix + "/",
- ETag: "dir_etag",
- Size: 0,
- },
- }
- clt, deps := setupClientWithAdminAndWalkerFactory(t, testutils.FakeFactory{Walker: w})
- _, err := deps.catalog.CreateRepository(ctx, "repo-dir-marker", onBlock(deps, "foo2"), "main")
- testutil.Must(t, err)
-
- resp, err := clt.IngestRangeWithResponse(ctx, "repo-dir-marker", apigen.IngestRangeJSONRequestBody{
- FromSourceURI: fromSourceURIWithPrefix,
- ContinuationToken: swag.String(continuationToken),
- After: after,
- })
- verifyResponseOK(t, resp, err)
- require.NotNil(t, resp.JSON201.Range)
- require.NotNil(t, resp.JSON201.Pagination)
- require.Equal(t, 1, resp.JSON201.Range.Count)
- require.Equal(t, resp.JSON201.Range.MinKey, "")
- require.Equal(t, resp.JSON201.Range.MaxKey, "")
- require.False(t, resp.JSON201.Pagination.HasMore)
- require.Empty(t, resp.JSON201.Pagination.LastKey)
- require.Empty(t, resp.JSON201.Pagination.ContinuationToken)
- })
-
- t.Run("successful ingestion no pagination", func(t *testing.T) {
- ctx := context.Background()
- repo := testUniqueRepoName()
- count := 1000
- clt, w := func(t *testing.T, count int, expectedErr error) (apigen.ClientWithResponsesInterface, *testutils.FakeWalker) {
- t.Helper()
- ctx := context.Background()
-
- w := testutils.NewFakeWalker(count, count, uriPrefix, after, continuationToken, fromSourceURIWithPrefix, expectedErr)
- clt, deps := setupClientWithAdminAndWalkerFactory(t, testutils.FakeFactory{Walker: w})
-
- // setup test data
- _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, "foo1"), "main")
- testutil.Must(t, err)
-
- return clt, w
- }(t, count, nil)
-
- resp, err := clt.IngestRangeWithResponse(ctx, repo, apigen.IngestRangeJSONRequestBody{
- After: after,
- FromSourceURI: fromSourceURIWithPrefix,
- Prepend: prepend,
- ContinuationToken: swag.String(continuationToken),
- })
-
- verifyResponseOK(t, resp, err)
- require.NotNil(t, resp.JSON201.Range)
- require.NotNil(t, resp.JSON201.Pagination)
- require.Equal(t, count, resp.JSON201.Range.Count)
- require.Equal(t, strings.Replace(w.Entries[0].FullKey, uriPrefix, prepend, 1), resp.JSON201.Range.MinKey)
- require.Equal(t, strings.Replace(w.Entries[count-1].FullKey, uriPrefix, prepend, 1), resp.JSON201.Range.MaxKey)
- require.False(t, resp.JSON201.Pagination.HasMore)
- require.Empty(t, resp.JSON201.Pagination.LastKey)
- require.Empty(t, resp.JSON201.Pagination.ContinuationToken)
- })
-
- t.Run("successful ingestion with pagination", func(t *testing.T) {
- // force splitting the range before
- ctx := context.Background()
- repo := testUniqueRepoName()
- count := 200_000
- clt, w := func(t *testing.T, count int, expectedErr error) (apigen.ClientWithResponsesInterface, *testutils.FakeWalker) {
- t.Helper()
- ctx := context.Background()
-
- w := testutils.NewFakeWalker(count, count, uriPrefix, after, continuationToken, fromSourceURIWithPrefix, expectedErr)
- clt, deps := setupClientWithAdminAndWalkerFactory(t, testutils.FakeFactory{Walker: w})
-
- // setup test data
- _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, "foo1"), "main")
- testutil.Must(t, err)
-
- return clt, w
- }(t, count, nil)
-
- resp, err := clt.IngestRangeWithResponse(ctx, repo, apigen.IngestRangeJSONRequestBody{
- After: after,
- FromSourceURI: fromSourceURIWithPrefix,
- Prepend: prepend,
- ContinuationToken: swag.String(continuationToken),
- })
-
- verifyResponseOK(t, resp, err)
- require.NotNil(t, resp.JSON201.Range)
- require.NotNil(t, resp.JSON201.Pagination)
- require.Less(t, resp.JSON201.Range.Count, count)
- require.Equal(t, strings.Replace(w.Entries[0].FullKey, uriPrefix, prepend, 1), resp.JSON201.Range.MinKey)
- require.Equal(t, strings.Replace(w.Entries[resp.JSON201.Range.Count-1].FullKey, uriPrefix, prepend, 1), resp.JSON201.Range.MaxKey)
- require.True(t, resp.JSON201.Pagination.HasMore)
- require.Equal(t, w.Entries[resp.JSON201.Range.Count-1].FullKey, resp.JSON201.Pagination.LastKey)
- require.Equal(t, testutils.ContinuationTokenOpaque, *resp.JSON201.Pagination.ContinuationToken)
- })
-
- t.Run("error during walk", func(t *testing.T) {
- // force splitting the range before
- ctx := context.Background()
- repo := testUniqueRepoName()
- count := 10
- expectedErr := errors.New("failed reading for object store")
- clt, _ := func(t *testing.T, count int, expectedErr error) (apigen.ClientWithResponsesInterface, *testutils.FakeWalker) {
- t.Helper()
- ctx := context.Background()
-
- w := testutils.NewFakeWalker(count, count, uriPrefix, after, continuationToken, fromSourceURIWithPrefix, expectedErr)
- clt, deps := setupClientWithAdminAndWalkerFactory(t, testutils.FakeFactory{Walker: w})
-
- // setup test data
- _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, "foo1"), "main")
- testutil.Must(t, err)
-
- return clt, w
- }(t, count, expectedErr)
-
- resp, err := clt.IngestRangeWithResponse(ctx, repo, apigen.IngestRangeJSONRequestBody{
- After: after,
- FromSourceURI: fromSourceURIWithPrefix,
- Prepend: prepend,
- ContinuationToken: swag.String(continuationToken),
- })
-
- require.NoError(t, err)
- require.Equal(t, http.StatusInternalServerError, resp.StatusCode())
- require.Contains(t, string(resp.Body), expectedErr.Error())
- })
-}
-
-func TestController_WriteMetaRangeHandler(t *testing.T) {
- ctx := context.Background()
- clt, deps := setupClientWithAdmin(t)
- repo := testUniqueRepoName()
- // setup test data
- _, err := deps.catalog.CreateRepository(ctx, repo, onBlock(deps, repo), "main")
- testutil.Must(t, err)
-
- t.Run("successful metarange creation", func(t *testing.T) {
- resp, err := clt.CreateMetaRangeWithResponse(ctx, repo, apigen.CreateMetaRangeJSONRequestBody{
- Ranges: []apigen.RangeMetadata{
- {Count: 11355, EstimatedSize: 123465897, Id: "FirstRangeID", MaxKey: "1", MinKey: "2"},
- {Count: 13123, EstimatedSize: 123465897, Id: "SecondRangeID", MaxKey: "3", MinKey: "4"},
- {Count: 10123, EstimatedSize: 123465897, Id: "ThirdRangeID", MaxKey: "5", MinKey: "6"},
- },
- })
-
- verifyResponseOK(t, resp, err)
- require.NotNil(t, resp.JSON201)
- require.NotNil(t, resp.JSON201.Id)
- require.NotEmpty(t, *resp.JSON201.Id)
-
- respMR, err := clt.GetMetaRangeWithResponse(ctx, repo, *resp.JSON201.Id)
- verifyResponseOK(t, respMR, err)
- require.NotNil(t, respMR.JSON200)
- require.NotEmpty(t, respMR.JSON200.Location)
- })
-
- t.Run("missing ranges", func(t *testing.T) {
- resp, err := clt.CreateMetaRangeWithResponse(ctx, repo, apigen.CreateMetaRangeJSONRequestBody{
- Ranges: []apigen.RangeMetadata{},
- })
-
- require.NoError(t, err)
- require.NotNil(t, resp.JSON400)
- require.Equal(t, http.StatusBadRequest, resp.StatusCode())
- })
-}
-
func TestController_ObjectsStatObjectHandler(t *testing.T) {
clt, deps := setupClientWithAdmin(t)
ctx := context.Background()
@@ -3466,7 +3277,7 @@ func TestController_Revert(t *testing.T) {
})
t.Run("dirty_branch", func(t *testing.T) {
- // create branch with entry without commit
+ // create branch with entry without a commit
createBranch, err := deps.catalog.CreateBranch(ctx, repo, "dirty", "main")
testutil.Must(t, err)
err = deps.catalog.CreateEntry(ctx, repo, "dirty", catalog.DBEntry{Path: "foo/bar2", PhysicalAddress: "bar2addr", CreationDate: time.Now(), Size: 1, Checksum: "cksum2"})
@@ -3621,7 +3432,7 @@ func TestController_CherryPick(t *testing.T) {
})
t.Run("dirty branch", func(t *testing.T) {
- // create branch with entry without commit
+ // create branch with entry without a commit
_, err := deps.catalog.CreateBranch(ctx, repo, "dirty", "main")
testutil.Must(t, err)
err = deps.catalog.CreateEntry(ctx, repo, "dirty", catalog.DBEntry{Path: "foo/bar5", PhysicalAddress: "bar50addr", CreationDate: time.Now(), Size: 5, Checksum: "cksum5"})
@@ -3991,7 +3802,7 @@ func TestController_ClientDisconnect(t *testing.T) {
t.Fatal("Expected to request complete without error, expected to fail")
}
- // wait for server to identify we left and update the counter
+ // wait for the server to identify we left and update the counter
time.Sleep(time.Second)
// request for metrics
@@ -4270,7 +4081,7 @@ func TestController_CopyObjectHandler(t *testing.T) {
})
verifyResponseOK(t, copyResp, err)
- // Verify creation path, date and physical address are different
+ // Verify the creation path, date and physical address are different
copyStat := copyResp.JSON201
require.NotNil(t, copyStat)
require.NotEqual(t, objStat.PhysicalAddress, copyStat.PhysicalAddress)
@@ -4297,7 +4108,7 @@ func TestController_CopyObjectHandler(t *testing.T) {
})
verifyResponseOK(t, copyResp, err)
- // Verify creation path, date and physical address are different
+ // Verify the creation path, date and physical address are different
copyStat := copyResp.JSON201
require.NotNil(t, copyStat)
require.NotEmpty(t, copyStat.PhysicalAddress)
@@ -4337,7 +4148,7 @@ func TestController_CopyObjectHandler(t *testing.T) {
})
verifyResponseOK(t, copyResp, err)
- // Verify creation path, date and physical address are different
+ // Verify the creation path, date and physical address are different
copyStat := copyResp.JSON201
require.NotNil(t, copyStat)
require.NotEmpty(t, copyStat.PhysicalAddress)
@@ -4566,7 +4377,7 @@ func TestController_BranchProtectionRules(t *testing.T) {
t.Fatalf("CreateBranchProtectionRulePreflightWithResponse expected %d, got %d", tc.expectedHttpStatus, respPreflight.StatusCode())
}
- // result of an actual call to the endpoint should have the same result
+ // the result of an actual call to the endpoint should have the same result
resp, err := tc.clt.CreateBranchProtectionRuleWithResponse(currCtx, repo, apigen.CreateBranchProtectionRuleJSONRequestBody{
Pattern: "main",
})
@@ -4625,7 +4436,7 @@ func TestController_GarbageCollectionRules(t *testing.T) {
t.Fatalf("SetGarbageCollectionRulesPreflightWithResponse expected %d, got %d", tc.expectedHttpStatus, respPreflight.StatusCode())
}
- // result of an actual call to the endpoint should have the same result
+ // the result of an actual call to the endpoint should have the same result
resp, err := tc.clt.SetGarbageCollectionRulesWithResponse(currCtx, repo, apigen.SetGarbageCollectionRulesJSONRequestBody{
Branches: []apigen.GarbageCollectionRule{{BranchId: "main", RetentionDays: 1}}, DefaultRetentionDays: 5,
})
diff --git a/pkg/auth/service.go b/pkg/auth/service.go
index 1dd6973441c..f9a91bbdd6b 100644
--- a/pkg/auth/service.go
+++ b/pkg/auth/service.go
@@ -28,7 +28,6 @@ import (
"github.com/treeverse/lakefs/pkg/kv"
"github.com/treeverse/lakefs/pkg/logging"
"github.com/treeverse/lakefs/pkg/permissions"
- "golang.org/x/crypto/bcrypt"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/known/timestamppb"
@@ -114,7 +113,6 @@ type Service interface {
GetCredentialsForUser(ctx context.Context, username, accessKeyID string) (*model.Credential, error)
GetCredentials(ctx context.Context, accessKeyID string) (*model.Credential, error)
ListUserCredentials(ctx context.Context, username string, params *model.PaginationParams) ([]*model.Credential, *model.Paginator, error)
- HashAndUpdatePassword(ctx context.Context, username string, password string) error
// policy<->user attachments
AttachPolicyToUser(ctx context.Context, policyDisplayName, username string) error
@@ -988,31 +986,6 @@ func (s *AuthService) GetCredentials(ctx context.Context, accessKeyID string) (*
})
}
-func (s *AuthService) HashAndUpdatePassword(ctx context.Context, username string, password string) error {
- user, err := s.GetUser(ctx, username)
- if err != nil {
- return err
- }
- pw, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
- if err != nil {
- return err
- }
- userKey := model.UserPath(user.Username)
- userUpdatePassword := model.User{
- CreatedAt: user.CreatedAt,
- Username: user.Username,
- FriendlyName: user.FriendlyName,
- Email: user.Email,
- EncryptedPassword: pw,
- Source: user.Source,
- }
- err = kv.SetMsgIf(ctx, s.store, model.PartitionKey, userKey, model.ProtoFromUser(&userUpdatePassword), user)
- if err != nil {
- return fmt.Errorf("update user password (userKey %s): %w", userKey, err)
- }
- return err
-}
-
func interpolateUser(resource string, username string) string {
return strings.ReplaceAll(resource, "${user}", username)
}
@@ -1351,20 +1324,6 @@ func (a *APIAuthService) ListUsers(ctx context.Context, params *model.Pagination
return users, toPagination(pagination), nil
}
-func (a *APIAuthService) HashAndUpdatePassword(ctx context.Context, username string, password string) error {
- encryptedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
- if err != nil {
- return err
- }
- resp, err := a.apiClient.UpdatePasswordWithResponse(ctx, username, UpdatePasswordJSONRequestBody{EncryptedPassword: encryptedPassword})
- if err != nil {
- a.logger.WithField("username", username).WithError(err).Error("failed to update password")
- return err
- }
-
- return a.validateResponse(resp, http.StatusOK)
-}
-
func (a *APIAuthService) CreateGroup(ctx context.Context, group *model.Group) error {
resp, err := a.apiClient.CreateGroupWithResponse(ctx, CreateGroupJSONRequestBody{
Id: group.DisplayName,
diff --git a/pkg/block/azure/adapter.go b/pkg/block/azure/adapter.go
index 6e0850dc796..1058c881503 100644
--- a/pkg/block/azure/adapter.go
+++ b/pkg/block/azure/adapter.go
@@ -2,7 +2,6 @@ package azure
import (
"context"
- "errors"
"fmt"
"io"
"net/http"
@@ -21,18 +20,16 @@ import (
"github.com/treeverse/lakefs/pkg/logging"
)
-var ErrNotImplemented = errors.New("not implemented")
-
const (
sizeSuffix = "_size"
idSuffix = "_id"
_1MiB = 1024 * 1024
MaxBuffers = 1
// udcCacheSize - Arbitrary number: exceeding this number means that in the expiry timeframe we requested pre-signed urls from
- // more the 5000 different accounts which is highly unlikely
	+ // more than 5000 different accounts, which is highly unlikely
udcCacheSize = 5000
- URLTemplate = "https://%s.blob.core.windows.net/"
+ BlobEndpointFormat = "https://%s.blob.core.windows.net/"
)
type Adapter struct {
@@ -92,7 +89,7 @@ func ResolveBlobURLInfoFromURL(pathURL *url.URL) (BlobURLInfo, error) {
return qk, err
}
- // In azure the first part of the path is part of the storage namespace
+ // In azure, the first part of the path is part of the storage namespace
trimmedPath := strings.Trim(pathURL.Path, "/")
pathParts := strings.Split(trimmedPath, "/")
if len(pathParts) == 0 {
@@ -222,9 +219,9 @@ func (a *Adapter) GetPreSignedURL(ctx context.Context, obj block.ObjectPointer,
Write: true,
}
}
- url, err := a.getPreSignedURL(ctx, obj, permissions)
+ preSignedURL, err := a.getPreSignedURL(ctx, obj, permissions)
// TODO(#6347): Report expiry.
- return url, time.Time{}, err
+ return preSignedURL, time.Time{}, err
}
func (a *Adapter) getPreSignedURL(ctx context.Context, obj block.ObjectPointer, permissions sas.BlobPermissions) (string, error) {
@@ -253,18 +250,25 @@ func (a *Adapter) getPreSignedURL(ctx context.Context, obj block.ObjectPointer,
}
// Create Blob Signature Values with desired permissions and sign with user delegation credential
- sasQueryParams, err := sas.BlobSignatureValues{
+ blobSignatureValues := sas.BlobSignatureValues{
Protocol: sas.ProtocolHTTPS,
ExpiryTime: urlExpiry,
Permissions: to.Ptr(permissions).String(),
ContainerName: qualifiedKey.ContainerName,
BlobName: qualifiedKey.BlobURL,
- }.SignWithUserDelegation(udc)
+ }
+ sasQueryParams, err := blobSignatureValues.SignWithUserDelegation(udc)
if err != nil {
return "", err
}
- u := fmt.Sprintf("%s/%s?%s", qualifiedKey.ContainerURL, qualifiedKey.BlobURL, sasQueryParams.Encode())
+ // format blob URL with signed SAS query params
+ accountEndpoint := fmt.Sprintf(BlobEndpointFormat, qualifiedKey.StorageAccountName)
+ u, err := url.JoinPath(accountEndpoint, qualifiedKey.ContainerName, qualifiedKey.BlobURL)
+ if err != nil {
+ return "", err
+ }
+ u += "?" + sasQueryParams.Encode()
return u, nil
}
@@ -541,7 +545,7 @@ func (a *Adapter) copyPartRange(ctx context.Context, sourceObj, destinationObj b
}
func (a *Adapter) AbortMultiPartUpload(_ context.Context, _ block.ObjectPointer, _ string) error {
- // Azure has no abort, in case of commit, uncommitted parts are erased, otherwise staged data is erased after 7 days
+ // Azure has no abort. In case of commit, uncommitted parts are erased. Otherwise, staged data is erased after 7 days
return nil
}
diff --git a/pkg/block/azure/client_cache.go b/pkg/block/azure/client_cache.go
index e82fbeb4f7a..90c1b8450ea 100644
--- a/pkg/block/azure/client_cache.go
+++ b/pkg/block/azure/client_cache.go
@@ -122,7 +122,7 @@ func BuildAzureServiceClient(params params.Azure) (*service.Client, error) {
if params.TestEndpointURL != "" { // For testing purposes - override default url template
url = params.TestEndpointURL
} else {
- url = fmt.Sprintf(URLTemplate, params.StorageAccount)
+ url = fmt.Sprintf(BlobEndpointFormat, params.StorageAccount)
}
options := service.ClientOptions{ClientOptions: azcore.ClientOptions{Retry: policy.RetryOptions{TryTimeout: params.TryTimeout}}}
diff --git a/pkg/catalog/catalog.go b/pkg/catalog/catalog.go
index 655832941c6..0c5dc2e7b77 100644
--- a/pkg/catalog/catalog.go
+++ b/pkg/catalog/catalog.go
@@ -2063,14 +2063,6 @@ func (c *Catalog) WriteMetaRange(ctx context.Context, repositoryID string, range
return c.Store.WriteMetaRange(ctx, repository, ranges)
}
-func (c *Catalog) UpdateBranchToken(ctx context.Context, repositoryID, branchID, stagingToken string) error {
- repository, err := c.getRepository(ctx, repositoryID)
- if err != nil {
- return err
- }
- return c.Store.UpdateBranchToken(ctx, repository, branchID, stagingToken)
-}
-
func (c *Catalog) GetGarbageCollectionRules(ctx context.Context, repositoryID string) (*graveler.GarbageCollectionRules, error) {
repository, err := c.getRepository(ctx, repositoryID)
if err != nil {
diff --git a/pkg/catalog/fake_graveler_test.go b/pkg/catalog/fake_graveler_test.go
index f7b6c737fe4..37d08fdf464 100644
--- a/pkg/catalog/fake_graveler_test.go
+++ b/pkg/catalog/fake_graveler_test.go
@@ -23,12 +23,7 @@ type FakeGraveler struct {
}
func (g *FakeGraveler) StageObject(ctx context.Context, stagingToken string, object graveler.ValueRecord) error {
- //TODO implement me
- panic("implement me")
-}
-
-func (g *FakeGraveler) UpdateBranchToken(_ context.Context, _ *graveler.RepositoryRecord, _, _ string) error {
- //TODO implement me
+ // TODO implement me
panic("implement me")
}
diff --git a/pkg/catalog/interface.go b/pkg/catalog/interface.go
index 9d14456cec8..31e81a26e60 100644
--- a/pkg/catalog/interface.go
+++ b/pkg/catalog/interface.go
@@ -154,7 +154,6 @@ type Interface interface {
CancelImport(ctx context.Context, repositoryID, importID string) error
WriteRange(ctx context.Context, repositoryID string, params WriteRangeRequest) (*graveler.RangeInfo, *Mark, error)
WriteMetaRange(ctx context.Context, repositoryID string, ranges []*graveler.RangeInfo) (*graveler.MetaRangeInfo, error)
- UpdateBranchToken(ctx context.Context, repositoryID, branchID, stagingToken string) error
GetGarbageCollectionRules(ctx context.Context, repositoryID string) (*graveler.GarbageCollectionRules, error)
SetGarbageCollectionRules(ctx context.Context, repositoryID string, rules *graveler.GarbageCollectionRules) error
diff --git a/pkg/fileutil/io.go b/pkg/fileutil/io.go
index 923f5012fcf..a25851e4b97 100644
--- a/pkg/fileutil/io.go
+++ b/pkg/fileutil/io.go
@@ -153,7 +153,7 @@ func VerifyAbsPath(absPath, basePath string) error {
}
func VerifyRelPath(relPath, basePath string) error {
- abs := basePath + string(os.PathSeparator) + relPath
+ abs := filepath.Join(basePath, relPath)
return VerifyAbsPath(abs, basePath)
}
diff --git a/pkg/graveler/graveler.go b/pkg/graveler/graveler.go
index dcb95ed2608..a7473f04ed1 100644
--- a/pkg/graveler/graveler.go
+++ b/pkg/graveler/graveler.go
@@ -656,8 +656,6 @@ type Plumbing interface {
WriteMetaRange(ctx context.Context, repository *RepositoryRecord, ranges []*RangeInfo) (*MetaRangeInfo, error)
// StageObject stages given object to stagingToken.
StageObject(ctx context.Context, stagingToken string, object ValueRecord) error
- // UpdateBranchToken updates the given branch stagingToken
- UpdateBranchToken(ctx context.Context, repository *RepositoryRecord, branchID, stagingToken string) error
}
type Dumper interface {
@@ -1104,25 +1102,6 @@ func (g *Graveler) StageObject(ctx context.Context, stagingToken string, object
return g.StagingManager.Set(ctx, StagingToken(stagingToken), object.Key, object.Value, false)
}
-func (g *Graveler) UpdateBranchToken(ctx context.Context, repository *RepositoryRecord, branchID, stagingToken string) error {
- err := g.RefManager.BranchUpdate(ctx, repository, BranchID(branchID), func(branch *Branch) (*Branch, error) {
- isEmpty, err := g.isStagingEmpty(ctx, repository, branch)
- if err != nil {
- return nil, err
- }
- if !isEmpty {
- return nil, fmt.Errorf("branch staging is not empty: %w", ErrDirtyBranch)
- }
- tokensToDrop := []StagingToken{branch.StagingToken}
- tokensToDrop = append(tokensToDrop, branch.SealedTokens...)
- g.dropTokens(ctx, tokensToDrop...)
- branch.StagingToken = StagingToken(stagingToken)
- branch.SealedTokens = make([]StagingToken, 0)
- return branch, nil
- })
- return err
-}
-
func (g *Graveler) WriteMetaRangeByIterator(ctx context.Context, repository *RepositoryRecord, it ValueIterator) (*MetaRangeID, error) {
return g.CommittedManager.WriteMetaRangeByIterator(ctx, repository.StorageNamespace, it, nil)
}
diff --git a/pkg/graveler/mock/graveler.go b/pkg/graveler/mock/graveler.go
index b3a6ebf62c3..aa973854438 100644
--- a/pkg/graveler/mock/graveler.go
+++ b/pkg/graveler/mock/graveler.go
@@ -986,20 +986,6 @@ func (mr *MockPlumbingMockRecorder) StageObject(ctx, stagingToken, object interf
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "StageObject", reflect.TypeOf((*MockPlumbing)(nil).StageObject), ctx, stagingToken, object)
}
-// UpdateBranchToken mocks base method.
-func (m *MockPlumbing) UpdateBranchToken(ctx context.Context, repository *graveler.RepositoryRecord, branchID, stagingToken string) error {
- m.ctrl.T.Helper()
- ret := m.ctrl.Call(m, "UpdateBranchToken", ctx, repository, branchID, stagingToken)
- ret0, _ := ret[0].(error)
- return ret0
-}
-
-// UpdateBranchToken indicates an expected call of UpdateBranchToken.
-func (mr *MockPlumbingMockRecorder) UpdateBranchToken(ctx, repository, branchID, stagingToken interface{}) *gomock.Call {
- mr.mock.ctrl.T.Helper()
- return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateBranchToken", reflect.TypeOf((*MockPlumbing)(nil).UpdateBranchToken), ctx, repository, branchID, stagingToken)
-}
-
// WriteMetaRange mocks base method.
func (m *MockPlumbing) WriteMetaRange(ctx context.Context, repository *graveler.RepositoryRecord, ranges []*graveler.RangeInfo) (*graveler.MetaRangeInfo, error) {
m.ctrl.T.Helper()
diff --git a/pkg/local/sync.go b/pkg/local/sync.go
index 35865804bce..c0afe49c7d7 100644
--- a/pkg/local/sync.go
+++ b/pkg/local/sync.go
@@ -75,17 +75,16 @@ func (s *SyncManager) Sync(rootPath string, remote *uri.URI, changeSet <-chan *C
defer s.progressBar.Stop()
wg, ctx := errgroup.WithContext(s.ctx)
- wg.SetLimit(s.maxParallelism)
- for change := range changeSet {
- c := change
- if err := ctx.Err(); err != nil {
- return err
- }
+ for i := 0; i < s.maxParallelism; i++ {
wg.Go(func() error {
- return s.apply(ctx, rootPath, remote, c)
+ for change := range changeSet {
+ if err := s.apply(ctx, rootPath, remote, change); err != nil {
+ return err
+ }
+ }
+ return nil
})
}
-
if err := wg.Wait(); err != nil {
return err
}
@@ -93,48 +92,41 @@ func (s *SyncManager) Sync(rootPath string, remote *uri.URI, changeSet <-chan *C
return err
}
-func (s *SyncManager) apply(ctx context.Context, rootPath string, remote *uri.URI, change *Change) (err error) {
+func (s *SyncManager) apply(ctx context.Context, rootPath string, remote *uri.URI, change *Change) error {
switch change.Type {
case ChangeTypeAdded, ChangeTypeModified:
switch change.Source {
case ChangeSourceRemote:
- // remote changed something, download it!
- err = s.download(ctx, rootPath, remote, change)
- if err != nil {
- err = fmt.Errorf("download %s failed: %w", change.Path, err)
	+ // the remote changed something, download it!
+ if err := s.download(ctx, rootPath, remote, change); err != nil {
+ return fmt.Errorf("download %s failed: %w", change.Path, err)
}
- return err
case ChangeSourceLocal:
// we wrote something, upload it!
- err = s.upload(ctx, rootPath, remote, change)
- if err != nil {
- err = fmt.Errorf("upload %s failed: %w", change.Path, err)
+ if err := s.upload(ctx, rootPath, remote, change); err != nil {
+ return fmt.Errorf("upload %s failed: %w", change.Path, err)
}
- return err
default:
panic("invalid change source")
}
case ChangeTypeRemoved:
if change.Source == ChangeSourceRemote {
// remote deleted something, delete it locally!
- err = s.deleteLocal(rootPath, change)
- if err != nil {
- err = fmt.Errorf("delete local %s failed: %w", change.Path, err)
+ if err := s.deleteLocal(rootPath, change); err != nil {
+ return fmt.Errorf("delete local %s failed: %w", change.Path, err)
}
- return err
} else {
// we deleted something, delete it on remote!
- err = s.deleteRemote(ctx, remote, change)
- if err != nil {
- err = fmt.Errorf("delete remote %s failed: %w", change.Path, err)
+ if err := s.deleteRemote(ctx, remote, change); err != nil {
+ return fmt.Errorf("delete remote %s failed: %w", change.Path, err)
}
- return err
}
case ChangeTypeConflict:
return ErrConflict
default:
panic("invalid change type")
}
+ return nil
}
func (s *SyncManager) download(ctx context.Context, rootPath string, remote *uri.URI, change *Change) error {
@@ -175,8 +167,8 @@ func (s *SyncManager) download(ctx context.Context, rootPath string, remote *uri
sizeBytes := swag.Int64Value(statResp.JSON200.SizeBytes)
f, err := os.Create(destination)
if err != nil {
- // sometimes we get a file that is actually a directory marker.
- // spark loves writing those. If we already have the directory we can skip it.
+ // Sometimes we get a file that is actually a directory marker (Spark loves writing those).
+ // If we already have the directory, we can skip it.
if errors.Is(err, syscall.EISDIR) && sizeBytes == 0 {
return nil // no further action required!
}
@@ -187,7 +179,7 @@ func (s *SyncManager) download(ctx context.Context, rootPath string, remote *uri
}()
if sizeBytes == 0 { // if size is empty just create file
- spinner := s.progressBar.AddSpinner(fmt.Sprintf("download %s", change.Path))
+ spinner := s.progressBar.AddSpinner("download " + change.Path)
atomic.AddUint64(&s.tasks.Downloaded, 1)
defer spinner.Done()
} else { // Download file
@@ -239,7 +231,7 @@ func (s *SyncManager) download(ctx context.Context, rootPath string, remote *uri
}
// set mtime to the server returned one
- err = os.Chtimes(destination, time.Now(), lastModified) // Explicit to catch in defer func
+ err = os.Chtimes(destination, time.Now(), lastModified) // Explicit to catch in deferred func
return err
}
@@ -292,7 +284,7 @@ func (s *SyncManager) upload(ctx context.Context, rootPath string, remote *uri.U
}
func (s *SyncManager) deleteLocal(rootPath string, change *Change) (err error) {
- b := s.progressBar.AddSpinner(fmt.Sprintf("delete local: %s", change.Path))
+ b := s.progressBar.AddSpinner("delete local: " + change.Path)
defer func() {
defer func() {
if err != nil {
@@ -312,7 +304,7 @@ func (s *SyncManager) deleteLocal(rootPath string, change *Change) (err error) {
}
func (s *SyncManager) deleteRemote(ctx context.Context, remote *uri.URI, change *Change) (err error) {
- b := s.progressBar.AddSpinner(fmt.Sprintf("delete remote path: %s", change.Path))
+ b := s.progressBar.AddSpinner("delete remote path: " + change.Path)
defer func() {
if err != nil {
b.Error()
diff --git a/webui/package-lock.json b/webui/package-lock.json
index ede4c9fd672..9f7692b2538 100644
--- a/webui/package-lock.json
+++ b/webui/package-lock.json
@@ -1235,9 +1235,9 @@
}
},
"node_modules/@mdn/browser-compat-data": {
- "version": "4.2.1",
- "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-4.2.1.tgz",
- "integrity": "sha512-EWUguj2kd7ldmrF9F+vI5hUOralPd+sdsUnYbRy33vZTuZkduC1shE9TtEMEjAQwyfyMb4ole5KtjF8MsnQOlA==",
+ "version": "5.3.15",
+ "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.3.15.tgz",
+ "integrity": "sha512-h/luqw9oAmMF1C/GuUY/PAgZlF4wx71q2bdH+ct8vmjcvseCY32au8XmYy7xZ8l5VJiY/3ltFpr5YiO55v0mzg==",
"dev": true
},
"node_modules/@mui/base": {
@@ -2715,20 +2715,14 @@
}
},
"node_modules/ast-metadata-inferer": {
- "version": "0.7.0",
- "resolved": "https://registry.npmjs.org/ast-metadata-inferer/-/ast-metadata-inferer-0.7.0.tgz",
- "integrity": "sha512-OkMLzd8xelb3gmnp6ToFvvsHLtS6CbagTkFQvQ+ZYFe3/AIl9iKikNR9G7pY3GfOR/2Xc222hwBjzI7HLkE76Q==",
+ "version": "0.8.0",
+ "resolved": "https://registry.npmjs.org/ast-metadata-inferer/-/ast-metadata-inferer-0.8.0.tgz",
+ "integrity": "sha512-jOMKcHht9LxYIEQu+RVd22vtgrPaVCtDRQ/16IGmurdzxvYbDd5ynxjnyrzLnieG96eTcAyaoj/wN/4/1FyyeA==",
"dev": true,
"dependencies": {
- "@mdn/browser-compat-data": "^3.3.14"
+ "@mdn/browser-compat-data": "^5.2.34"
}
},
- "node_modules/ast-metadata-inferer/node_modules/@mdn/browser-compat-data": {
- "version": "3.3.14",
- "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-3.3.14.tgz",
- "integrity": "sha512-n2RC9d6XatVbWFdHLimzzUJxJ1KY8LdjqrW6YvGPiRmsHkhOUx74/Ct10x5Yo7bC/Jvqx7cDEW8IMPv/+vwEzA==",
- "dev": true
- },
"node_modules/astral-regex": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz",
@@ -2908,9 +2902,9 @@
}
},
"node_modules/browserslist": {
- "version": "4.21.4",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz",
- "integrity": "sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==",
+ "version": "4.21.10",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz",
+ "integrity": "sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ==",
"funding": [
{
"type": "opencollective",
@@ -2919,13 +2913,17 @@
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/browserslist"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
}
],
"dependencies": {
- "caniuse-lite": "^1.0.30001400",
- "electron-to-chromium": "^1.4.251",
- "node-releases": "^2.0.6",
- "update-browserslist-db": "^1.0.9"
+ "caniuse-lite": "^1.0.30001517",
+ "electron-to-chromium": "^1.4.477",
+ "node-releases": "^2.0.13",
+ "update-browserslist-db": "^1.0.11"
},
"bin": {
"browserslist": "cli.js"
@@ -2999,9 +2997,9 @@
}
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001517",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001517.tgz",
- "integrity": "sha512-Vdhm5S11DaFVLlyiKu4hiUTkpZu+y1KA/rZZqVQfOD5YdDT/eQKlkt7NaE0WGOFgX32diqt9MiP9CAiFeRklaA==",
+ "version": "1.0.30001534",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001534.tgz",
+ "integrity": "sha512-vlPVrhsCS7XaSh2VvWluIQEzVhefrUQcEsQWSS5A5V+dM07uv1qHeQzAOTGIMy9i3e9bH15+muvI/UHojVgS/Q==",
"funding": [
{
"type": "opencollective",
@@ -3416,17 +3414,6 @@
"safe-buffer": "~5.1.1"
}
},
- "node_modules/core-js": {
- "version": "3.26.1",
- "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.26.1.tgz",
- "integrity": "sha512-21491RRQVzUn0GGM9Z1Jrpr6PNPxPi+Za8OM9q4tksTSnlbXXGKK1nXNg/QvwFYettXvSX6zWKCtHHfjN4puyA==",
- "dev": true,
- "hasInstallScript": true,
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/core-js"
- }
- },
"node_modules/cosmiconfig": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz",
@@ -3835,9 +3822,9 @@
"dev": true
},
"node_modules/electron-to-chromium": {
- "version": "1.4.284",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz",
- "integrity": "sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA=="
+ "version": "1.4.520",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.520.tgz",
+ "integrity": "sha512-Frfus2VpYADsrh1lB3v/ft/WVFlVzOIm+Q0p7U7VqHI6qr7NWHYKe+Wif3W50n7JAFoBsWVsoU0+qDks6WQ60g=="
},
"node_modules/emoji-regex": {
"version": "8.0.0",
@@ -4159,22 +4146,21 @@
}
},
"node_modules/eslint-plugin-compat": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-4.0.2.tgz",
- "integrity": "sha512-xqvoO54CLTVaEYGMzhu35Wzwk/As7rCvz/2dqwnFiWi0OJccEtGIn+5qq3zqIu9nboXlpdBN579fZcItC73Ycg==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-4.2.0.tgz",
+ "integrity": "sha512-RDKSYD0maWy5r7zb5cWQS+uSPc26mgOzdORJ8hxILmWM7S/Ncwky7BcAtXVY5iRbKjBdHsWU8Yg7hfoZjtkv7w==",
"dev": true,
"dependencies": {
- "@mdn/browser-compat-data": "^4.1.5",
- "ast-metadata-inferer": "^0.7.0",
- "browserslist": "^4.16.8",
- "caniuse-lite": "^1.0.30001304",
- "core-js": "^3.16.2",
+ "@mdn/browser-compat-data": "^5.3.13",
+ "ast-metadata-inferer": "^0.8.0",
+ "browserslist": "^4.21.10",
+ "caniuse-lite": "^1.0.30001524",
"find-up": "^5.0.0",
- "lodash.memoize": "4.1.2",
- "semver": "7.3.5"
+ "lodash.memoize": "^4.1.2",
+ "semver": "^7.5.4"
},
"engines": {
- "node": ">=9.x"
+ "node": ">=14.x"
},
"peerDependencies": {
"eslint": "^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0"
@@ -4227,9 +4213,9 @@
}
},
"node_modules/eslint-plugin-compat/node_modules/semver": {
- "version": "7.3.5",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
- "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
+ "version": "7.5.4",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
+ "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dev": true,
"dependencies": {
"lru-cache": "^6.0.0"
@@ -7210,9 +7196,9 @@
}
},
"node_modules/node-releases": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz",
- "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg=="
+ "version": "2.0.13",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz",
+ "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ=="
},
"node_modules/normalize-path": {
"version": "3.0.0",
@@ -9427,9 +9413,9 @@
}
},
"node_modules/update-browserslist-db": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz",
- "integrity": "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==",
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz",
+ "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==",
"funding": [
{
"type": "opencollective",
@@ -9438,6 +9424,10 @@
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/browserslist"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
}
],
"dependencies": {
@@ -9445,7 +9435,7 @@
"picocolors": "^1.0.0"
},
"bin": {
- "browserslist-lint": "cli.js"
+ "update-browserslist-db": "cli.js"
},
"peerDependencies": {
"browserslist": ">= 4.21.0"
@@ -10876,9 +10866,9 @@
}
},
"@mdn/browser-compat-data": {
- "version": "4.2.1",
- "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-4.2.1.tgz",
- "integrity": "sha512-EWUguj2kd7ldmrF9F+vI5hUOralPd+sdsUnYbRy33vZTuZkduC1shE9TtEMEjAQwyfyMb4ole5KtjF8MsnQOlA==",
+ "version": "5.3.15",
+ "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.3.15.tgz",
+ "integrity": "sha512-h/luqw9oAmMF1C/GuUY/PAgZlF4wx71q2bdH+ct8vmjcvseCY32au8XmYy7xZ8l5VJiY/3ltFpr5YiO55v0mzg==",
"dev": true
},
"@mui/base": {
@@ -11908,20 +11898,12 @@
"dev": true
},
"ast-metadata-inferer": {
- "version": "0.7.0",
- "resolved": "https://registry.npmjs.org/ast-metadata-inferer/-/ast-metadata-inferer-0.7.0.tgz",
- "integrity": "sha512-OkMLzd8xelb3gmnp6ToFvvsHLtS6CbagTkFQvQ+ZYFe3/AIl9iKikNR9G7pY3GfOR/2Xc222hwBjzI7HLkE76Q==",
+ "version": "0.8.0",
+ "resolved": "https://registry.npmjs.org/ast-metadata-inferer/-/ast-metadata-inferer-0.8.0.tgz",
+ "integrity": "sha512-jOMKcHht9LxYIEQu+RVd22vtgrPaVCtDRQ/16IGmurdzxvYbDd5ynxjnyrzLnieG96eTcAyaoj/wN/4/1FyyeA==",
"dev": true,
"requires": {
- "@mdn/browser-compat-data": "^3.3.14"
- },
- "dependencies": {
- "@mdn/browser-compat-data": {
- "version": "3.3.14",
- "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-3.3.14.tgz",
- "integrity": "sha512-n2RC9d6XatVbWFdHLimzzUJxJ1KY8LdjqrW6YvGPiRmsHkhOUx74/Ct10x5Yo7bC/Jvqx7cDEW8IMPv/+vwEzA==",
- "dev": true
- }
+ "@mdn/browser-compat-data": "^5.2.34"
}
},
"astral-regex": {
@@ -12058,14 +12040,14 @@
}
},
"browserslist": {
- "version": "4.21.4",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz",
- "integrity": "sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==",
+ "version": "4.21.10",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz",
+ "integrity": "sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ==",
"requires": {
- "caniuse-lite": "^1.0.30001400",
- "electron-to-chromium": "^1.4.251",
- "node-releases": "^2.0.6",
- "update-browserslist-db": "^1.0.9"
+ "caniuse-lite": "^1.0.30001517",
+ "electron-to-chromium": "^1.4.477",
+ "node-releases": "^2.0.13",
+ "update-browserslist-db": "^1.0.11"
}
},
"buffer": {
@@ -12110,9 +12092,9 @@
}
},
"caniuse-lite": {
- "version": "1.0.30001517",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001517.tgz",
- "integrity": "sha512-Vdhm5S11DaFVLlyiKu4hiUTkpZu+y1KA/rZZqVQfOD5YdDT/eQKlkt7NaE0WGOFgX32diqt9MiP9CAiFeRklaA=="
+ "version": "1.0.30001534",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001534.tgz",
+ "integrity": "sha512-vlPVrhsCS7XaSh2VvWluIQEzVhefrUQcEsQWSS5A5V+dM07uv1qHeQzAOTGIMy9i3e9bH15+muvI/UHojVgS/Q=="
},
"ccount": {
"version": "2.0.1",
@@ -12389,12 +12371,6 @@
"safe-buffer": "~5.1.1"
}
},
- "core-js": {
- "version": "3.26.1",
- "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.26.1.tgz",
- "integrity": "sha512-21491RRQVzUn0GGM9Z1Jrpr6PNPxPi+Za8OM9q4tksTSnlbXXGKK1nXNg/QvwFYettXvSX6zWKCtHHfjN4puyA==",
- "dev": true
- },
"cosmiconfig": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz",
@@ -12712,9 +12688,9 @@
"dev": true
},
"electron-to-chromium": {
- "version": "1.4.284",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz",
- "integrity": "sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA=="
+ "version": "1.4.520",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.520.tgz",
+ "integrity": "sha512-Frfus2VpYADsrh1lB3v/ft/WVFlVzOIm+Q0p7U7VqHI6qr7NWHYKe+Wif3W50n7JAFoBsWVsoU0+qDks6WQ60g=="
},
"emoji-regex": {
"version": "8.0.0",
@@ -13092,19 +13068,18 @@
}
},
"eslint-plugin-compat": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-4.0.2.tgz",
- "integrity": "sha512-xqvoO54CLTVaEYGMzhu35Wzwk/As7rCvz/2dqwnFiWi0OJccEtGIn+5qq3zqIu9nboXlpdBN579fZcItC73Ycg==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-compat/-/eslint-plugin-compat-4.2.0.tgz",
+ "integrity": "sha512-RDKSYD0maWy5r7zb5cWQS+uSPc26mgOzdORJ8hxILmWM7S/Ncwky7BcAtXVY5iRbKjBdHsWU8Yg7hfoZjtkv7w==",
"dev": true,
"requires": {
- "@mdn/browser-compat-data": "^4.1.5",
- "ast-metadata-inferer": "^0.7.0",
- "browserslist": "^4.16.8",
- "caniuse-lite": "^1.0.30001304",
- "core-js": "^3.16.2",
+ "@mdn/browser-compat-data": "^5.3.13",
+ "ast-metadata-inferer": "^0.8.0",
+ "browserslist": "^4.21.10",
+ "caniuse-lite": "^1.0.30001524",
"find-up": "^5.0.0",
- "lodash.memoize": "4.1.2",
- "semver": "7.3.5"
+ "lodash.memoize": "^4.1.2",
+ "semver": "^7.5.4"
},
"dependencies": {
"find-up": {
@@ -13136,9 +13111,9 @@
}
},
"semver": {
- "version": "7.3.5",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
- "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
+ "version": "7.5.4",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
+ "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dev": true,
"requires": {
"lru-cache": "^6.0.0"
@@ -15087,9 +15062,9 @@
}
},
"node-releases": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz",
- "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg=="
+ "version": "2.0.13",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz",
+ "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ=="
},
"normalize-path": {
"version": "3.0.0",
@@ -16692,9 +16667,9 @@
"peer": true
},
"update-browserslist-db": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz",
- "integrity": "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==",
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz",
+ "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==",
"requires": {
"escalade": "^3.1.1",
"picocolors": "^1.0.0"
diff --git a/webui/src/lib/api/index.js b/webui/src/lib/api/index.js
index efd10c7108d..d2959788319 100644
--- a/webui/src/lib/api/index.js
+++ b/webui/src/lib/api/index.js
@@ -138,36 +138,6 @@ class Auth {
}
}
- async updatePasswordByToken(token, newPassword, email) {
- const response = await fetch(`${API_ENDPOINT}/auth/password`, {
- headers: new Headers(defaultAPIHeaders),
- method: 'POST',
- body: JSON.stringify({token: token, newPassword: newPassword, email: email})
- });
-
- if (response.status === 401) {
- throw new AuthorizationError('user unauthorized');
- }
- if (response.status !== 201) {
- throw new Error('failed to update password');
- }
- }
-
- async passwordForgot(email) {
- const response = await fetch(`${API_ENDPOINT}/auth/password/forgot`, {
- headers: new Headers(defaultAPIHeaders),
- method: 'POST',
- body: JSON.stringify({email: email})
- });
-
- if (response.status === 400) {
- throw new BadRequestError("invalid email");
- }
- if (response.status !== 204) {
- throw new Error('failed to request password reset');
- }
- }
-
async login(accessKeyId, secretAccessKey) {
const response = await fetch(`${API_ENDPOINT}/auth/login`, {
headers: new Headers(defaultAPIHeaders),
@@ -569,16 +539,6 @@ class Branches {
}
return response.json();
}
-
- async updateToken(repoId, branch, staging_token) {
- const response = await apiRequest(`/repositories/${encodeURIComponent(repoId)}/branches/${encodeURIComponent(branch)}/update_token`, {
- method: 'PUT',
- body: JSON.stringify({staging_token: staging_token}),
- });
- if (response.status !== 201) {
- throw new Error(await extractError(response));
- }
- }
}
@@ -1032,10 +992,8 @@ class BranchProtectionRules {
async createRulePreflight(repoID) {
const response = await apiRequest(`/repositories/${encodeURIComponent(repoID)}/branch_protection/set_allowed`);
- if (response.status !== 204) {
- return false;
- }
- return true;
+ return response.status === 204;
+
}
async createRule(repoID, pattern) {
@@ -1063,48 +1021,6 @@ class BranchProtectionRules {
}
-class Ranges {
- async createRange(repoID, fromSourceURI, after, prepend, continuation_token = "", staging_token="") {
- const response = await apiRequest(`/repositories/${repoID}/branches/ranges`, {
- method: 'POST',
- body: JSON.stringify({fromSourceURI, after, prepend, continuation_token, staging_token}),
- });
- if (response.status !== 201) {
- throw new Error(await extractError(response));
- }
- return response.json();
- }
-}
-
-class MetaRanges {
- async createMetaRange(repoID, ranges) {
- const response = await apiRequest(`/repositories/${repoID}/branches/metaranges`, {
- method: 'POST',
- body: JSON.stringify({ranges}),
- });
- if (response.status !== 201) {
- throw new Error(await extractError(response));
- }
- return response.json();
- }
-}
-
-class Templates {
- async expandTemplate(templateLocation, params) {
- const urlParams = new URLSearchParams();
- for (const [k, v] of Object.entries(params)) {
- urlParams.set(k, v);
- }
- const response = await apiRequest(
- `/templates/${encodeURI(templateLocation)}?${urlParams.toString()}`,
- {method: 'GET'});
- if (!response.ok) {
- throw new Error(await extractError(response));
- }
- return response.text();
- }
-}
-
class Statistics {
async postStatsEvents(statsEvents) {
const request = {
@@ -1219,9 +1135,6 @@ export const actions = new Actions();
export const retention = new Retention();
export const config = new Config();
export const branchProtectionRules = new BranchProtectionRules();
-export const ranges = new Ranges();
-export const metaRanges = new MetaRanges();
-export const templates = new Templates();
export const statistics = new Statistics();
export const staging = new Staging();
export const otfDiffs = new OTFDiffs();
diff --git a/webui/src/lib/components/repository/ObjectsDiff.jsx b/webui/src/lib/components/repository/ObjectsDiff.jsx
index 2a26c132dde..7209078e42e 100644
--- a/webui/src/lib/components/repository/ObjectsDiff.jsx
+++ b/webui/src/lib/components/repository/ObjectsDiff.jsx
@@ -65,12 +65,9 @@ function readableObject(path) {
const NoContentDiff = ({left, right, diffType}) => {
const supportedFileExtensions = supportedReadableFormats.map((fileType) => `.${fileType}`);
- // use the list formatter in place of manual formatting
- // ref: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Intl/ListFormat
- const formatter = new Intl.ListFormat('en', { style: "long", type: "conjunction" });
return
- {`lakeFS supports content diff for ${formatter.format(supportedFileExtensions)} file formats only`}
+ {`lakeFS supports content diff for ${supportedFileExtensions.join(',')} file formats only`}
;
}
diff --git a/webui/src/lib/components/repository/commits.jsx b/webui/src/lib/components/repository/commits.jsx
new file mode 100644
index 00000000000..71a3b241b63
--- /dev/null
+++ b/webui/src/lib/components/repository/commits.jsx
@@ -0,0 +1,158 @@
+import ButtonGroup from "react-bootstrap/ButtonGroup";
+import {ClipboardButton, LinkButton} from "../controls";
+import {BrowserIcon, LinkIcon, PackageIcon, PlayIcon} from "@primer/octicons-react";
+import Table from "react-bootstrap/Table";
+import {MetadataRow, MetadataUIButton} from "../../../pages/repositories/repository/commits/commit/metadata";
+import {Link} from "../nav";
+import dayjs from "dayjs";
+import Card from "react-bootstrap/Card";
+import React from "react";
+
+
+const CommitActions = ({ repo, commit }) => {
+
+ const buttonVariant = "outline-dark";
+
+ return (
+