diff --git a/README.md b/README.md index 659493e..f749cc4 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,107 @@ +# Databricks Java Rest Client -# How To Run Integration Tests +_This is a simple Java library that provides programmatic access to the [Databricks Rest Service](https://docs.databricks.com/api/latest/index.html)._ + + +## API Overview + +[![Javadocs](http://www.javadoc.io/badge/com.edmunds.databricks/databricks-rest-client.svg)](http://www.javadoc.io/doc/com.edmunds.databricks/databricks-rest-client) + +This library implements only a subset of the functionality that the Databricks Rest Interface provides. +The idea is to add functionality as users of this library need it. +Here are the endpoints that are currently supported: + +- Cluster Service + +- Dbfs Service + +- Job Service + +- Library Service + +- Workspace Service + +Please look at the javadocs for the specific service to get more detailed information on what +functionality is currently available. + +If there is important functionality that is currently missing, please create a GitHub issue. + +## Examples +```java +public class MyClient { + public static void main(String[] args) throws DatabricksRestException, IOException { + // Construct a serviceFactory using token authentication + DatabricksServiceFactory serviceFactory = + DatabricksServiceFactory.Builder + .createServiceFactoryWithTokenAuthentication("myToken", "myHost") + .withMaxRetries(5) + .withRetryInterval(10000L) + .build(); + + // Let's get our databricks job "myJob" and edit maxRetries to 5 + JobDTO jobDTO = serviceFactory.getJobService().getJobByName("myJob"); + JobSettingsDTO jobSettingsDTO = jobDTO.getSettings(); + jobSettingsDTO.setMaxRetries(5); + serviceFactory.getJobService().upsertJob(jobSettingsDTO, true); + + // Let's install a jar on a specific cluster + LibraryDTO libraryDTO = new LibraryDTO(); + libraryDTO.setJar("s3://myBucket/myJar.jar"); + for (ClusterInfoDTO clusterInfoDTO : serviceFactory.getClusterService().list()) { + if (clusterInfoDTO.getClusterName().equals("myCluster")) { + serviceFactory.getLibraryService().install(clusterInfoDTO.getClusterId(), new LibraryDTO[]{libraryDTO}); + } + } + } +} +``` +For more examples, take a look at the service tests. + +## Building, Installing and Running + +### Getting Started and Prerequisites + +- You will need Maven installed + +### Building + +*How to build the project locally:* +```mvn clean install``` + + +## Unit Tests + +There are currently no unit tests for this project. Our view is that the only testable +functionality is the integration between our client and an actual databricks instance. +As such we currently only have integration tests. + + +## Integration Tests +IMPORTANT: integration tests do not execute automatically as part of a build. +It is your responsibility (and that of Pull Request Reviewers) to make sure the integration tests +pass before merging in code. + +### Setup You need to set the following environment properties in your .bash_profile +```bash export DB_USER=myuser@domain.com export DB_PASSWORD=mypassword export DB_URL=my-databricks-account.databricks.com +export DB_TOKEN=my-token +``` +In order for the integration tests to run, you must +have a valid token for the user in question.
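As a rough illustration of how the test code consumes these variables, the integration fixtures read them via `System.getenv` and feed them to the `DatabricksServiceFactory.Builder`. The following is a minimal sketch only (the class name `IntegrationSmokeTest` is made up for illustration; the real wiring lives in `DatabricksFixtures`):

```java
import com.edmunds.rest.databricks.DatabricksServiceFactory;

public class IntegrationSmokeTest {

  public static void main(String[] args) {
    // Mirrors the variables exported in .bash_profile above.
    String host = System.getenv("DB_URL");
    String token = System.getenv("DB_TOKEN");

    // Token authentication; user/password can be used instead via
    // createServiceFactoryWithUserPasswordAuthentication(DB_USER, DB_PASSWORD, DB_URL).
    DatabricksServiceFactory serviceFactory =
        DatabricksServiceFactory.Builder
            .createServiceFactoryWithTokenAuthentication(token, host)
            .build();

    // Any service obtained from this factory now talks to the instance under test.
    System.out.println(serviceFactory.getClusterService() != null);
  }
}
```
The token itself has to be created on the Databricks side first.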
+Here is how to set it up: [Set up Tokens](https://docs.databricks.com/api/latest/authentication.html) - -# To execute the integration tests please run: +### Executing Integration Tests mvn clean install org.apache.maven.plugins:maven-failsafe-plugin:integration-test +## Deployment + +Please see the CONTRIBUTING.md about our release process. +As this is a library, there is no deployment operation needed. + +## Contributing + +Please read [CONTRIBUTING.md](CONTRIBUTING.md) for the process for merging code into master. diff --git a/src/main/java/com/edmunds/rest/databricks/DatabricksServiceFactory.java b/src/main/java/com/edmunds/rest/databricks/DatabricksServiceFactory.java index f041733..22e5cfd 100644 --- a/src/main/java/com/edmunds/rest/databricks/DatabricksServiceFactory.java +++ b/src/main/java/com/edmunds/rest/databricks/DatabricksServiceFactory.java @@ -16,6 +16,9 @@ package com.edmunds.rest.databricks; +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; +import com.edmunds.rest.databricks.restclient.DatabricksRestClientImpl; +import com.edmunds.rest.databricks.restclient.DatabricksRestClientImpl425; import com.edmunds.rest.databricks.service.ClusterService; import com.edmunds.rest.databricks.service.ClusterServiceImpl; import com.edmunds.rest.databricks.service.DbfsService; @@ -29,7 +32,9 @@ /** - * Factory class for all other specific Databricks Service Wrappers. + * This is the class that clients should interact with. + * It provides singletons for all of the Services, as well as + * abstracting the construction of the databricks rest client. */ public final class DatabricksServiceFactory { @@ -43,6 +48,11 @@ public final class DatabricksServiceFactory { private JobService jobService; private DbfsService dbfsService; + public DatabricksServiceFactory(DatabricksRestClient databricksRestClient) { + this.client2dot0 = databricksRestClient; + } + + @Deprecated public DatabricksServiceFactory(String username, String password, String host) { this(username, password, host, DEFAULT_HTTP_CLIENT_MAX_RETRY, DEFAULT_HTTP_CLIENT_RETRY_INTERVAL); @@ -50,9 +60,11 @@ public DatabricksServiceFactory(String username, String password, String host) { /** * Creating a Databricks Service object. + * * @param maxRetry http client maxRetry when failed due to I/O , timeout error * @param retryInterval http client retry interval when failed due to I/O , timeout error */ + @Deprecated public DatabricksServiceFactory(String username, String password, String host, int maxRetry, long retryInterval) { this(username, password, host, maxRetry, retryInterval, false); @@ -64,17 +76,35 @@ public DatabricksServiceFactory(String username, String password, String host, i * * @param useLegacyAPI425 choose what version of API compatible HttpClient. */ + @Deprecated public DatabricksServiceFactory(String username, String password, String host, int maxRetry, long retryInterval, boolean useLegacyAPI425) { if (useLegacyAPI425) { - client2dot0 = new DatabricksRestClientImpl425(username, password, host, "2.0", maxRetry, - retryInterval); + client2dot0 = DatabricksRestClientImpl425 + .createClientWithUserPassword(username, password, host, "2.0", maxRetry, + retryInterval); } else { - client2dot0 = new DatabricksRestClientImpl(username, password, host, "2.0", maxRetry, - retryInterval); + client2dot0 = DatabricksRestClientImpl + .createClientWithUserPassword(username, password, host, "2.0", maxRetry, + retryInterval); } } + /** + * Create a databricks service factory using personal token authentication instead. 
+ * + * @param personalToken your personal token + * @param host the databricks host + * @param maxRetry the maximum number of retries + * @param retryInterval the retry interval between each attempt + */ + @Deprecated + public DatabricksServiceFactory(String personalToken, String host, + int maxRetry, long retryInterval) { + client2dot0 = DatabricksRestClientImpl + .createClientWithTokenAuthentication(personalToken, host, "2.0", maxRetry, retryInterval); + } + /** * Will return a Databricks Cluster Service singleton. */ @@ -124,4 +154,85 @@ public DbfsService getDbfsService() { } return dbfsService; } + + /** + * This is how the DatabricksServiceFactory should be constructed. This gives flexibility to add + * more parameters later without ending up with large constructors. + */ + public static class Builder { + + long retryInterval = DEFAULT_HTTP_CLIENT_RETRY_INTERVAL; + int maxRetries = DEFAULT_HTTP_CLIENT_MAX_RETRY; + String token; + String host; + String username; + String password; + + private Builder() { + //NO-OP + } + + /** + * Creates a DatabricksServiceFactory using token authentication. + * + * @param token your databricks token + * @param host the databricks host where that token is valid + * @return the builder object + */ + public static Builder createServiceFactoryWithTokenAuthentication(String token, String host) { + Builder builder = new Builder(); + builder.token = token; + builder.host = host; + return builder; + } + + /** + * Creates a DatabricksServiceFactory using username/password authentication. + * + * @param username databricks username + * @param password databricks password + * @param host the databricks host + * @return the builder object + */ + public static Builder createServiceFactoryWithUserPasswordAuthentication(String username, + String password, String host) { + Builder builder = new Builder(); + builder.username = username; + builder.password = password; + builder.host = host; + return builder; + } + + public Builder withMaxRetries(int maxRetries) { + this.maxRetries = maxRetries; + return this; + } + + public Builder withRetryInterval(long retryInterval) { + this.retryInterval = retryInterval; + return this; + } + + /** + * Builds a DatabricksServiceFactory. Conducts basic validation.
+ * + * @return the databricks service factory object + */ + public DatabricksServiceFactory build() { + if (token != null) { + return new DatabricksServiceFactory( + DatabricksRestClientImpl + .createClientWithTokenAuthentication(token, host, "2.0", maxRetries, retryInterval) + ); + } else if (username != null && password != null) { + return new DatabricksServiceFactory( + DatabricksRestClientImpl + .createClientWithUserPassword(username, password, host, "2.0", maxRetries, + retryInterval) + ); + } else { + throw new IllegalArgumentException("Token or username/password must be set!"); + } + } + } } diff --git a/src/main/java/com/edmunds/rest/databricks/JobRunner.java b/src/main/java/com/edmunds/rest/databricks/JobRunner.java index e1a7ddd..7cde518 100644 --- a/src/main/java/com/edmunds/rest/databricks/JobRunner.java +++ b/src/main/java/com/edmunds/rest/databricks/JobRunner.java @@ -83,7 +83,10 @@ private JobService getService() { String password = parser.getPassword(); String hostname = parser.getHostname(); - DatabricksServiceFactory factory = new DatabricksServiceFactory(username, password, hostname); + DatabricksServiceFactory factory = + DatabricksServiceFactory.Builder + .createServiceFactoryWithUserPasswordAuthentication(username, password, hostname) + .build(); return factory.getJobService(); } diff --git a/src/main/java/com/edmunds/rest/databricks/request/CreateClusterRequest.java b/src/main/java/com/edmunds/rest/databricks/request/CreateClusterRequest.java index 004e52e..c3e027c 100644 --- a/src/main/java/com/edmunds/rest/databricks/request/CreateClusterRequest.java +++ b/src/main/java/com/edmunds/rest/databricks/request/CreateClusterRequest.java @@ -25,9 +25,8 @@ /** * A Cluster Request object. - * Should be deprecated in favor of using DTO objects. + * TODO Should be deprecated in favor of using DTO objects. */ -@Deprecated public class CreateClusterRequest extends DatabricksRestRequest { private CreateClusterRequest(Map data) { diff --git a/src/main/java/com/edmunds/rest/databricks/request/DatabricksRestRequest.java b/src/main/java/com/edmunds/rest/databricks/request/DatabricksRestRequest.java index 0443fed..8e1d547 100644 --- a/src/main/java/com/edmunds/rest/databricks/request/DatabricksRestRequest.java +++ b/src/main/java/com/edmunds/rest/databricks/request/DatabricksRestRequest.java @@ -21,7 +21,6 @@ /** * Base class for Request Objects. */ -@Deprecated public abstract class DatabricksRestRequest { private Map data; diff --git a/src/main/java/com/edmunds/rest/databricks/request/EditClusterRequest.java b/src/main/java/com/edmunds/rest/databricks/request/EditClusterRequest.java index 61a0dae..2cf6276 100644 --- a/src/main/java/com/edmunds/rest/databricks/request/EditClusterRequest.java +++ b/src/main/java/com/edmunds/rest/databricks/request/EditClusterRequest.java @@ -25,9 +25,8 @@ /** * Edit Cluster Request object. - * Should be deprecated in favor of using DTOs. + * TODO Should be deprecated in favor of using DTOs. */ -@Deprecated public class EditClusterRequest extends DatabricksRestRequest { private EditClusterRequest(Map data) { diff --git a/src/main/java/com/edmunds/rest/databricks/request/ExportWorkspaceRequest.java b/src/main/java/com/edmunds/rest/databricks/request/ExportWorkspaceRequest.java index 0f92df0..4e25074 100644 --- a/src/main/java/com/edmunds/rest/databricks/request/ExportWorkspaceRequest.java +++ b/src/main/java/com/edmunds/rest/databricks/request/ExportWorkspaceRequest.java @@ -22,9 +22,8 @@ /** * Export Workspace Request object. 
- * Should be deprecated in favor of using DTOs. + * TODO Should be deprecated in favor of using DTOs. */ -@Deprecated public class ExportWorkspaceRequest extends DatabricksRestRequest { private ExportWorkspaceRequest(Map data) { diff --git a/src/main/java/com/edmunds/rest/databricks/request/ImportWorkspaceRequest.java b/src/main/java/com/edmunds/rest/databricks/request/ImportWorkspaceRequest.java index 858ac67..ebbc0ea 100644 --- a/src/main/java/com/edmunds/rest/databricks/request/ImportWorkspaceRequest.java +++ b/src/main/java/com/edmunds/rest/databricks/request/ImportWorkspaceRequest.java @@ -23,9 +23,8 @@ /** * Import Workspace Request object. - * Deprecated in favor of using DTOs + * TODO Should be deprecated in favor of using DTOs */ -@Deprecated public class ImportWorkspaceRequest extends DatabricksRestRequest { private ImportWorkspaceRequest(Map data) { diff --git a/src/main/java/com/edmunds/rest/databricks/AbstractDatabricksRestClientImpl.java b/src/main/java/com/edmunds/rest/databricks/restclient/AbstractDatabricksRestClientImpl.java similarity index 91% rename from src/main/java/com/edmunds/rest/databricks/AbstractDatabricksRestClientImpl.java rename to src/main/java/com/edmunds/rest/databricks/restclient/AbstractDatabricksRestClientImpl.java index 74bc3db..a5715fb 100644 --- a/src/main/java/com/edmunds/rest/databricks/AbstractDatabricksRestClientImpl.java +++ b/src/main/java/com/edmunds/rest/databricks/restclient/AbstractDatabricksRestClientImpl.java @@ -14,8 +14,11 @@ * limitations under the License. */ -package com.edmunds.rest.databricks; +package com.edmunds.rest.databricks.restclient; +import com.edmunds.rest.databricks.DatabricksRestException; +import com.edmunds.rest.databricks.HttpServiceUnavailableRetryStrategy; +import com.edmunds.rest.databricks.RequestMethod; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; @@ -46,8 +49,6 @@ public abstract class AbstractDatabricksRestClientImpl implements DatabricksRest private static Logger logger = Logger.getLogger(AbstractDatabricksRestClientImpl.class.getName()); protected final String apiVersion; protected final String host; - protected final String username; - protected final String password; protected String url; protected ObjectMapper mapper; @@ -58,32 +59,18 @@ public abstract class AbstractDatabricksRestClientImpl implements DatabricksRest /** * Creates a rest client. - * @param username databricks username - * @param password databricks password * @param host databricks host * @param apiVersion databricks api version * @param maxRetry how many retries * @param retryInterval interval btween retries */ - public AbstractDatabricksRestClientImpl(String username, String password, String host, - String apiVersion, - int maxRetry, long retryInterval) { - this.username = username; - this.password = password; + public AbstractDatabricksRestClientImpl(String host, String apiVersion, int maxRetry, long retryInterval) { this.host = host; this.apiVersion = apiVersion; this.retryHandler = new StandardHttpRequestRetryHandler(maxRetry, false); this.retryStrategy = new HttpServiceUnavailableRetryStrategy(maxRetry, retryInterval); - - init(); } - /** - * init url/mapper/client variable. 
- */ - protected abstract void init(); - - protected byte[] extractContent(HttpResponse httpResponse) throws IOException, DatabricksRestException { @@ -97,12 +84,10 @@ protected byte[] extractContent(HttpResponse httpResponse) return IOUtils.toByteArray(httpResponse.getEntity().getContent()); } - public String getHost() { return host; } - protected HttpRequestBase makeHttpMethod(RequestMethod requestMethod, String path, Map data) throws UnsupportedEncodingException, JsonProcessingException { @@ -165,5 +150,4 @@ protected StringEntity makeStringRequestEntity(Map data) return new StringEntity(body); } - } \ No newline at end of file diff --git a/src/main/java/com/edmunds/rest/databricks/DatabricksRestClient.java b/src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClient.java similarity index 83% rename from src/main/java/com/edmunds/rest/databricks/DatabricksRestClient.java rename to src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClient.java index 77a486f..a7fcbb1 100644 --- a/src/main/java/com/edmunds/rest/databricks/DatabricksRestClient.java +++ b/src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClient.java @@ -14,13 +14,14 @@ * limitations under the License. */ -package com.edmunds.rest.databricks; +package com.edmunds.rest.databricks.restclient; +import com.edmunds.rest.databricks.DatabricksRestException; +import com.edmunds.rest.databricks.RequestMethod; import java.util.Map; /** - * A Simple Databricks Rest Client Interface. - * Will perform a given rest request. + * A Simple Databricks Rest Client Interface. Will perform a given rest request. */ public interface DatabricksRestClient { diff --git a/src/main/java/com/edmunds/rest/databricks/DatabricksRestClientImpl.java b/src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClientImpl.java similarity index 63% rename from src/main/java/com/edmunds/rest/databricks/DatabricksRestClientImpl.java rename to src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClientImpl.java index a2cd824..879d6af 100644 --- a/src/main/java/com/edmunds/rest/databricks/DatabricksRestClientImpl.java +++ b/src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClientImpl.java @@ -14,13 +14,18 @@ * limitations under the License. */ -package com.edmunds.rest.databricks; +package com.edmunds.rest.databricks.restclient; +import com.edmunds.rest.databricks.DatabricksRestException; +import com.edmunds.rest.databricks.RequestMethod; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import javax.net.ssl.SSLContext; +import org.apache.http.Header; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.CredentialsProvider; @@ -32,42 +37,76 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClients; +import org.apache.http.message.BasicHeader; import org.apache.http.util.EntityUtils; import org.apache.log4j.Logger; /** * The main implementation of databricks rest client, which uses up to date httpclient. 
*/ -public final class DatabricksRestClientImpl extends AbstractDatabricksRestClientImpl { +public class DatabricksRestClientImpl extends AbstractDatabricksRestClientImpl { private static Logger logger = Logger.getLogger(DatabricksRestClientImpl.class.getName()); + private DatabricksRestClientImpl(String host, String apiVersion, int maxRetry, + long retryInterval) { + super(host, apiVersion, maxRetry, retryInterval); + } - public DatabricksRestClientImpl(String username, String password, String host, String apiVersion, - int maxRetry, long retryInterval) { - super(username, password, host, apiVersion, maxRetry, retryInterval); + /** + * Constructs a rest client with user and password authentication. + */ + public static DatabricksRestClientImpl createClientWithUserPassword(String username, + String password, String host, + String apiVersion, int maxRetry, long retryInterval) { + DatabricksRestClientImpl client = new DatabricksRestClientImpl(host, apiVersion, maxRetry, + retryInterval); + client.initClientWithUserPassword(username, password); + return client; } - @Override - protected void init() { + /** + * Constructs a rest client with token authentication. + */ + public static DatabricksRestClientImpl createClientWithTokenAuthentication(String token, + String host, + String apiVersion, int maxRetry, long retryInterval) { + DatabricksRestClientImpl client = new DatabricksRestClientImpl(host, apiVersion, maxRetry, + retryInterval); + client.initClientWithToken(token); + return client; + } + + protected void initClientWithUserPassword(String username, String password) { CredentialsProvider credsProvider = new BasicCredentialsProvider(); credsProvider.setCredentials( new AuthScope(host, HTTPS_PORT), new UsernamePasswordCredentials(username, password)); - RequestConfig defaultRequestConfig = RequestConfig.custom() - .setExpectContinueEnabled(true) - .setSocketTimeout(SOCKET_TIMEOUT) - .setConnectTimeout(CONNECTION_TIMEOUT) - .setConnectionRequestTimeout(CONNECTION_REQUEST_TIMEOUT) - .build(); - HttpClientBuilder clientBuilder = HttpClients.custom() .setDefaultCredentialsProvider(credsProvider) .setRetryHandler(retryHandler) .setServiceUnavailableRetryStrategy(retryStrategy) - .setDefaultRequestConfig(defaultRequestConfig); + .setDefaultRequestConfig(createRequestConfig()); + + commonInit(clientBuilder); + } + + protected void initClientWithToken(String personalToken) { + List
<Header> headers = new ArrayList<>(); + Header authHeader = new BasicHeader("Authorization", String.format("Bearer %s", personalToken)); + headers.add(authHeader); + + HttpClientBuilder clientBuilder = HttpClients.custom() + .setRetryHandler(retryHandler) + .setServiceUnavailableRetryStrategy(retryStrategy) + .setDefaultRequestConfig(createRequestConfig()) + .setDefaultHeaders(headers); + commonInit(clientBuilder); + } + + protected void commonInit(HttpClientBuilder clientBuilder) { try { SSLContext ctx = SSLContext.getDefault(); // Allow TLSv1.2 protocol only @@ -87,6 +126,16 @@ protected void init() { mapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_DEFAULT); } + private RequestConfig createRequestConfig() { + RequestConfig defaultRequestConfig = RequestConfig.custom() + .setExpectContinueEnabled(true) + .setSocketTimeout(SOCKET_TIMEOUT) + .setConnectTimeout(CONNECTION_TIMEOUT) + .setConnectionRequestTimeout(CONNECTION_REQUEST_TIMEOUT) + .build(); + return defaultRequestConfig; + } + @Override public byte[] performQuery(RequestMethod requestMethod, String path, Map data) throws diff --git a/src/main/java/com/edmunds/rest/databricks/DatabricksRestClientImpl425.java b/src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClientImpl425.java similarity index 77% rename from src/main/java/com/edmunds/rest/databricks/DatabricksRestClientImpl425.java rename to src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClientImpl425.java index 4badd79..c061acf 100644 --- a/src/main/java/com/edmunds/rest/databricks/DatabricksRestClientImpl425.java +++ b/src/main/java/com/edmunds/rest/databricks/restclient/DatabricksRestClientImpl425.java @@ -14,8 +14,10 @@ * limitations under the License. */ -package com.edmunds.rest.databricks; +package com.edmunds.rest.databricks.restclient; +import com.edmunds.rest.databricks.DatabricksRestException; +import com.edmunds.rest.databricks.RequestMethod; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; import java.security.SecureRandom; @@ -39,20 +41,33 @@ import org.apache.log4j.Logger; /** - * To run on CDH5.7.1, use httpclient4.2.5 version API + * This implementation uses an older version of httpclient (version 4.2.5), + * which allows it to be used on certain systems that enforce older versions of the library. + * This version will eventually be removed, so please don't use it unless you have to. */ +@Deprecated public final class DatabricksRestClientImpl425 extends AbstractDatabricksRestClientImpl { private static Logger logger = Logger.getLogger(DatabricksRestClientImpl425.class.getName()); + private DatabricksRestClientImpl425(String host, + String apiVersion, int maxRetry, long retryInterval) { + super(host, apiVersion, maxRetry, retryInterval); + } - public DatabricksRestClientImpl425(String username, String password, String host, + /** + * Constructs an older http-client version of the user/password authentication rest client.
+ */ + public static DatabricksRestClientImpl425 createClientWithUserPassword(String username, + String password, String host, String apiVersion, int maxRetry, long retryInterval) { - super(username, password, host, apiVersion, maxRetry, retryInterval); + DatabricksRestClientImpl425 client = new DatabricksRestClientImpl425(host, apiVersion, maxRetry, + retryInterval); + client.initClientWithUserPassword(username, password); + return client; } - @Override - protected void init() { + protected void initClientWithUserPassword(String username, String password) { try { SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); diff --git a/src/main/java/com/edmunds/rest/databricks/service/ClusterService.java b/src/main/java/com/edmunds/rest/databricks/service/ClusterService.java index a84a3f7..80f5236 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/ClusterService.java +++ b/src/main/java/com/edmunds/rest/databricks/service/ClusterService.java @@ -29,23 +29,89 @@ */ public interface ClusterService { + /** + * Creates a databricks cluster. + * https://docs.databricks.com/api/latest/clusters.html#create + * @param createClusterRequest the cluster request object + * @return the clusterId + * @throws IOException other connection errors + * @throws DatabricksRestException any errors with the request + */ String create(CreateClusterRequest createClusterRequest) throws IOException, DatabricksRestException; + /** + * Edits the configurations of a databricks cluster. + * https://docs.databricks.com/api/latest/clusters.html#edit + * @param editClusterRequest the edit cluster request object + * @throws IOException any other errors + * @throws DatabricksRestException any errors with the request + */ void edit(EditClusterRequest editClusterRequest) throws IOException, DatabricksRestException; + /** + * Starts a databricks cluster. + * https://docs.databricks.com/api/latest/clusters.html#start + * @param clusterId the clusterId of the cluster you want to start. + * @throws IOException any other errors + * @throws DatabricksRestException any errors with the request + */ void start(String clusterId) throws IOException, DatabricksRestException; + /** + * Restarts a given databricks cluster. + * https://docs.databricks.com/api/latest/clusters.html#restart + * @param clusterId the clusterid of the cluster you want to restart. + * @throws IOException any other errors + * @throws DatabricksRestException any errors with the request + */ void restart(String clusterId) throws IOException, DatabricksRestException; + /** + * Resizes a cluster. Will give it a fixed number of workers. + * https://docs.databricks.com/api/latest/clusters.html#resize + * @param numWorkers the number of workers you want to resize to + * @param clusterId the clusterId you want to operate on + * @throws IOException any other errors + * @throws DatabricksRestException any errors with request + */ void resize(int numWorkers, String clusterId) throws IOException, DatabricksRestException; + /** + * Resizes a cluster. Will add/edit autoscale functionality. + * https://docs.databricks.com/api/latest/clusters.html#resize + * @param autoscale the autoscale specs. + * @param clusterId the clusterId you want to work on + * @throws IOException any other errors + * @throws DatabricksRestException any errors with request + */ void resize(AutoScaleDTO autoscale, String clusterId) throws IOException, DatabricksRestException; + /** + * Terminates a cluster. 
+ * https://docs.databricks.com/api/latest/clusters.html#delete-terminate + * @param clusterId the cluster you want to terminate + * @throws IOException any other errors + * @throws DatabricksRestException errors with request + */ void delete(String clusterId) throws IOException, DatabricksRestException; + /** + * Gets information about a given cluster. + * https://docs.databricks.com/api/latest/clusters.html#get + * @param clusterId the cluster you want to get info about + * @return the information DTO object + * @throws IOException any other errors. + * @throws DatabricksRestException any errors with the request + */ ClusterInfoDTO getInfo(String clusterId) throws IOException, DatabricksRestException; + /** + * Lists all of the clusters on a given databricks instance. + * @return an array of cluster information objects + * @throws IOException any other errors + * @throws DatabricksRestException any errors with the request + */ ClusterInfoDTO[] list() throws IOException, DatabricksRestException; } diff --git a/src/main/java/com/edmunds/rest/databricks/service/ClusterServiceImpl.java b/src/main/java/com/edmunds/rest/databricks/service/ClusterServiceImpl.java index 6c81529..2bc6b23 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/ClusterServiceImpl.java +++ b/src/main/java/com/edmunds/rest/databricks/service/ClusterServiceImpl.java @@ -18,11 +18,11 @@ import com.edmunds.rest.databricks.DTO.AutoScaleDTO; import com.edmunds.rest.databricks.DTO.ClusterInfoDTO; -import com.edmunds.rest.databricks.DatabricksRestClient; import com.edmunds.rest.databricks.DatabricksRestException; import com.edmunds.rest.databricks.RequestMethod; import com.edmunds.rest.databricks.request.CreateClusterRequest; import com.edmunds.rest.databricks.request.EditClusterRequest; +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; import com.fasterxml.jackson.core.type.TypeReference; import java.io.IOException; import java.util.HashMap; diff --git a/src/main/java/com/edmunds/rest/databricks/service/DatabricksService.java b/src/main/java/com/edmunds/rest/databricks/service/DatabricksService.java index 6d0eea1..163c4f8 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/DatabricksService.java +++ b/src/main/java/com/edmunds/rest/databricks/service/DatabricksService.java @@ -16,7 +16,7 @@ package com.edmunds.rest.databricks.service; -import com.edmunds.rest.databricks.DatabricksRestClient; +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; diff --git a/src/main/java/com/edmunds/rest/databricks/service/DbfsService.java b/src/main/java/com/edmunds/rest/databricks/service/DbfsService.java index d78e906..521e33e 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/DbfsService.java +++ b/src/main/java/com/edmunds/rest/databricks/service/DbfsService.java @@ -28,22 +28,91 @@ */ public interface DbfsService { + /** + * Removes a dbfs path. + * https://docs.databricks.com/api/latest/dbfs.html#delete + * @param path the path to delete + * @param recursive whether or not it should be recursive + * @throws IOException any other errors + * @throws DatabricksRestException any errors in request + */ void rm(String path, boolean recursive) throws IOException, DatabricksRestException; + /** + * Get info of a specific file or directory on dbfs. 
+ * https://docs.databricks.com/api/latest/dbfs.html#get-status + * @param path the dbfs path + * @return the file info status object + * @throws IOException any other errors + * @throws DatabricksRestException any errors in request + */ FileInfoDTO getInfo(String path) throws IOException, DatabricksRestException; + /** + * Lists files and directories in a dbfs path. + * https://docs.databricks.com/api/latest/dbfs.html#list + * @param path the dbfs path + * @return an array of file info objects. + * @throws IOException any other errors + * @throws DatabricksRestException any errors in request + */ FileInfoDTO[] ls(String path) throws IOException, DatabricksRestException; + /** + * Makes a directory (and parent directories) at a given path. + * https://docs.databricks.com/api/latest/dbfs.html#mkdirs + * @param path the dbfs path to create + * @throws IOException any other errors + * @throws DatabricksRestException any errors in request + */ void mkdirs(String path) throws IOException, DatabricksRestException; + /** + * Moves a file from one path to another. + * https://docs.databricks.com/api/latest/dbfs.html#move + * @param sourcePath the source dbfs path + * @param destinationPath the destination dbfs path + * @throws IOException any other errors + * @throws DatabricksRestException any errors with request + */ void mv(String sourcePath, String destinationPath) throws IOException, DatabricksRestException; + /** + * Will create a file and write to that file on dbfs. + * Combination of: + * https://docs.databricks.com/api/latest/dbfs.html#create + * and + * https://docs.databricks.com/api/latest/dbfs.html#put + * @param path the path to create a file/write to + * @param inputStream the stream to output to dbfs + * @param overwrite whether or not you want to overwrite the file + * @throws IOException if any other errors + * @throws DatabricksRestException any errors in request + */ void write(String path, InputStream inputStream, boolean overwrite) throws IOException, DatabricksRestException; + /** + * Reads a file from dbfs. + * https://docs.databricks.com/api/latest/dbfs.html#read + * @param path the dbfs path to read from + * @param offset the offset of the file you want to read from + * @param length how many bytes you want to read + * @return the read DTO object + * @throws IOException any other errors + * @throws DatabricksRestException any errors in request + */ DbfsReadDTO read(String path, long offset, long length) throws IOException, DatabricksRestException; + /** + * Will read a whole file. 
+ * https://docs.databricks.com/api/latest/dbfs.html#read + * @param path the dbfs path to read from + * @return the dbfs read DTO object + * @throws IOException any other errors + * @throws DatabricksRestException any errors in request + */ DbfsReadDTO read(String path) throws IOException, DatabricksRestException; } \ No newline at end of file diff --git a/src/main/java/com/edmunds/rest/databricks/service/DbfsServiceImpl.java b/src/main/java/com/edmunds/rest/databricks/service/DbfsServiceImpl.java index 2cb71c9..f740bc4 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/DbfsServiceImpl.java +++ b/src/main/java/com/edmunds/rest/databricks/service/DbfsServiceImpl.java @@ -18,9 +18,9 @@ import com.edmunds.rest.databricks.DTO.DbfsReadDTO; import com.edmunds.rest.databricks.DTO.FileInfoDTO; -import com.edmunds.rest.databricks.DatabricksRestClient; import com.edmunds.rest.databricks.DatabricksRestException; import com.edmunds.rest.databricks.RequestMethod; +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; import com.fasterxml.jackson.core.type.TypeReference; import java.io.IOException; import java.io.InputStream; diff --git a/src/main/java/com/edmunds/rest/databricks/service/JobService.java b/src/main/java/com/edmunds/rest/databricks/service/JobService.java index 39602ef..2cce2b6 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/JobService.java +++ b/src/main/java/com/edmunds/rest/databricks/service/JobService.java @@ -30,32 +30,34 @@ import java.util.regex.Pattern; /** - * A wrapper around the Databricks Job API. https://docs.databricks.com/api/latest/jobs.html + * A wrapper around the Databricks Job API. + * https://docs.databricks.com/api/latest/jobs.html */ public interface JobService { /** * Creates job from settings DTO. + * https://docs.databricks.com/api/latest/jobs.html#create */ long createJob(JobSettingsDTO jobSettingsDTO) throws IOException, DatabricksRestException; /** * Deletes the job and sends an email to the addresses specified in email_notifications. - * + * https://docs.databricks.com/api/latest/jobs.html#delete * @param jobId The canonical identifier of the job to delete */ void deleteJob(long jobId) throws IOException, DatabricksRestException; /** * Deletes a job with a given name. Will fail if multiple jobs exist. - * + * https://docs.databricks.com/api/latest/jobs.html#delete * @param jobName - the job name to delete. */ void deleteJob(String jobName) throws IOException, DatabricksRestException; /** * Retrieves information about a single job. - * + * https://docs.databricks.com/api/latest/jobs.html#get * @param jobId The canonical identifier of the job to retrieve information about * @return POJO of the Job information */ @@ -63,7 +65,7 @@ public interface JobService { /** * Retrieves all jobs matching a name exactly. - * + * https://docs.databricks.com/api/latest/jobs.html#list * @param jobName - the full name of the job * @return - all jobs that match. */ @@ -72,12 +74,16 @@ public interface JobService { /** * Will try and get a job by name. If more then one job exists, it will fail. If no job exists it * will return null. + * Uses a list behind the scenes, so is O(n) with number of jobs. + * https://docs.databricks.com/api/latest/jobs.html#list */ JobDTO getJobByName(String jobName) throws IOException, DatabricksRestException; /** * Will try and get a job by name. If more then one job exists and failOnMultipleJobs set to * false, it will return the first job created. 
+ * Uses a list behind the scenes, so is O(n) with number of jobs. + * https://docs.databricks.com/api/latest/jobs.html#list */ JobDTO getJobByName(String jobName, boolean failOnMultipleJobs) throws IOException, DatabricksRestException; @@ -85,21 +91,21 @@ JobDTO getJobByName(String jobName, boolean failOnMultipleJobs) /** * Retrieves all jobs with a name matching a given regex. - * + * Uses a list behind the scenes, so is O(n) with number of jobs. + * https://docs.databricks.com/api/latest/jobs.html#list * @param regex - the regex to earch for */ List getJobsByRegex(Pattern regex) throws IOException, DatabricksRestException; /** * Returns a list of all jobs that are active. - * + * https://docs.databricks.com/api/latest/jobs.html#list * @return A POJO of the Jobs */ JobsDTO listAllJobs() throws IOException, DatabricksRestException; /** * Produces the URL of a job given job id. - * * @param jobId The canonical identifier of the job to retrieve information about * @return URL of the job */ @@ -107,15 +113,15 @@ JobDTO getJobByName(String jobName, boolean failOnMultipleJobs) /** * Runs the job now. - * + * https://docs.databricks.com/api/latest/jobs.html#run-now * @param jobId The job to run * @return Returns the run_id and number_in_run of the triggered run */ RunNowDTO runJobNow(long jobId) throws DatabricksRestException, IOException; /** - * Runs the job now. - * + * Runs the job now. With specific notebook params. + * https://docs.databricks.com/api/latest/jobs.html#run-now * @param jobId The job to run * @param notebookParams A map from keys to values for jobs with notebook task * @return Returns the run_id and number_in_run of the triggered run @@ -123,12 +129,21 @@ JobDTO getJobByName(String jobName, boolean failOnMultipleJobs) RunNowDTO runJobNow(long jobId, Map notebookParams) throws DatabricksRestException, IOException; + /** + * Runs the job now. With specific run parameters. + * https://docs.databricks.com/api/latest/jobs.html#run-now + * @param jobId the job id to run + * @param params the run parameters + * @return the run now object + * @throws DatabricksRestException any db specific exceptions + * @throws IOException any other exceptions + */ RunNowDTO runJobNow(long jobId, RunParametersDTO params) throws DatabricksRestException, IOException; /** * Lists runs from most recently started to least. - * + * https://docs.databricks.com/api/latest/jobs.html#runs-list * @param jobId The job for which to list runs. If omitted, * the Jobs service will list runs from all jobs * @param activeOnly If true, lists active runs only; otherwise, lists both active and inactive @@ -146,7 +161,7 @@ RunsDTO listRuns(Long jobId, Boolean activeOnly, Integer offset, Integer limit) /** * Retrieves the metadata of a run. - * + * https://docs.databricks.com/api/latest/jobs.html#runs-get * @param runId The canonical identifier of the run for which to retrieve the metadata * @return Returns the metadata of the specified run */ @@ -155,11 +170,19 @@ RunsDTO listRuns(Long jobId, Boolean activeOnly, Integer offset, Integer limit) /** * Cancels a run. The run is canceled asynchronously, so when this request completes, the run may * still be running. The run will be terminated shortly. - * + * https://docs.databricks.com/api/latest/jobs.html#runs-cancel * @param runId The desired run to cancel */ void cancelRun(long runId) throws IOException, DatabricksRestException; + /** + * "resets" or "edits" a job definition. 
+ * https://docs.databricks.com/api/latest/jobs.html#reset + * @param jobId the job to edit + * @param jobSettings the settings to change the job to + * @throws IOException any other errors + * @throws DatabricksRestException any specific db errors + */ void reset(long jobId, JobSettingsDTO jobSettings) throws IOException, DatabricksRestException; @@ -172,6 +195,11 @@ RunsDTO listRuns(Long jobId, Boolean activeOnly, Integer offset, Integer limit) * Given a job settings DTO object it will: - try to find the id by name. If multiple exist, it * will fail if the duplicate job name flag is set. Else it will update all jobs found. - create * the job if it doesn't exist - reset the job if it does exist. + * Uses a combination of + * If job doesn't exist: + * https://docs.databricks.com/api/latest/jobs.html#create + * If job exists: + * https://docs.databricks.com/api/latest/jobs.html#reset */ void upsertJob(JobSettingsDTO jobSettingsDTO, boolean failOnDuplicateJobNames) throws IOException, DatabricksRestException; diff --git a/src/main/java/com/edmunds/rest/databricks/service/JobServiceImpl.java b/src/main/java/com/edmunds/rest/databricks/service/JobServiceImpl.java index cf802c5..c098978 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/JobServiceImpl.java +++ b/src/main/java/com/edmunds/rest/databricks/service/JobServiceImpl.java @@ -23,9 +23,9 @@ import com.edmunds.rest.databricks.DTO.RunNowDTO; import com.edmunds.rest.databricks.DTO.RunParametersDTO; import com.edmunds.rest.databricks.DTO.RunsDTO; -import com.edmunds.rest.databricks.DatabricksRestClient; import com.edmunds.rest.databricks.DatabricksRestException; import com.edmunds.rest.databricks.RequestMethod; +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; import com.fasterxml.jackson.core.type.TypeReference; import java.io.IOException; import java.util.ArrayList; diff --git a/src/main/java/com/edmunds/rest/databricks/service/LibraryService.java b/src/main/java/com/edmunds/rest/databricks/service/LibraryService.java index 6241da7..1223be2 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/LibraryService.java +++ b/src/main/java/com/edmunds/rest/databricks/service/LibraryService.java @@ -27,14 +27,45 @@ */ public interface LibraryService { + /** + * Gets the statuses of all clusters on a databricks instance. + * https://docs.databricks.com/api/latest/libraries.html#all-cluster-statuses + * @return the array of all cluster statuses. + * @throws IOException any other errors. + * @throws DatabricksRestException errors with db request. + */ ClusterLibraryStatusesDTO[] allClusterStatuses() throws IOException, DatabricksRestException; + /** + * Gets the status of a specific cluster. + * https://docs.databricks.com/api/latest/libraries.html#cluster-status + * @param clusterId the cluster id to get the status of + * @return the cluster status dto object + * @throws IOException any other errors + * @throws DatabricksRestException errors with specific request + */ ClusterLibraryStatusesDTO clusterStatus(String clusterId) throws IOException, DatabricksRestException; + /** + * Installs one or more libraries on a specific cluster. + * https://docs.databricks.com/api/latest/libraries.html#install + * @param clusterId the cluster to install to. 
+ * @param libraries the libraries to install to this cluster + * @throws IOException any other errors + * @throws DatabricksRestException errors with specific request + */ void install(String clusterId, LibraryDTO[] libraries) throws IOException, DatabricksRestException; + /** + * Uninstalls one or more libraries from a specific cluster. + * https://docs.databricks.com/api/latest/libraries.html#uninstall + * @param clusterId the cluster to uninstall a library from + * @param libraries the libraries to uninstall + * @throws IOException any other errors + * @throws DatabricksRestException any errors with request + */ void uninstall(String clusterId, LibraryDTO[] libraries) throws IOException, DatabricksRestException; } diff --git a/src/main/java/com/edmunds/rest/databricks/service/LibraryServiceImpl.java b/src/main/java/com/edmunds/rest/databricks/service/LibraryServiceImpl.java index a5e28c6..6722514 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/LibraryServiceImpl.java +++ b/src/main/java/com/edmunds/rest/databricks/service/LibraryServiceImpl.java @@ -18,9 +18,9 @@ import com.edmunds.rest.databricks.DTO.ClusterLibraryStatusesDTO; import com.edmunds.rest.databricks.DTO.LibraryDTO; -import com.edmunds.rest.databricks.DatabricksRestClient; import com.edmunds.rest.databricks.DatabricksRestException; import com.edmunds.rest.databricks.RequestMethod; +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; import com.fasterxml.jackson.core.type.TypeReference; import java.io.IOException; import java.util.HashMap; diff --git a/src/main/java/com/edmunds/rest/databricks/service/WorkspaceService.java b/src/main/java/com/edmunds/rest/databricks/service/WorkspaceService.java index 4883b26..3f1e37f 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/WorkspaceService.java +++ b/src/main/java/com/edmunds/rest/databricks/service/WorkspaceService.java @@ -28,18 +28,64 @@ */ public interface WorkspaceService { + /** + * Deletes a specific workspace path. + * https://docs.databricks.com/api/latest/workspace.html#delete + * @param path the workspace path to delete + * @param recursive whether or not it is recursive + * @throws IOException any other errors + * @throws DatabricksRestException databricks specific errors + */ void delete(String path, boolean recursive) throws IOException, DatabricksRestException; + /** + * Exports a workspace path FROM databricks to the local machine. + * https://docs.databricks.com/api/latest/workspace.html#export + * @param exportWorkspaceRequest the request object + * @return the bytes of the export + * @throws IOException any other errors + * @throws DatabricksRestException any specific db errors + */ byte[] exportWorkspace(ExportWorkspaceRequest exportWorkspaceRequest) throws IOException, DatabricksRestException; + /** + * Gets the status of a specific workspace path. + * https://docs.databricks.com/api/latest/workspace.html#get-status + * @param path the workspace path + * @return the workspace info object + * @throws IOException any other errors + * @throws DatabricksRestException any specific db errors + */ ObjectInfoDTO getStatus(String path) throws IOException, DatabricksRestException; + /** + * Imports files INTO databricks from the local machine.
+ * https://docs.databricks.com/api/latest/workspace.html#import + * @param importWorkspaceRequest the import workspace request + * @throws IOException any other errors + * @throws DatabricksRestException specific db exceptions + */ void importWorkspace(ImportWorkspaceRequest importWorkspaceRequest) throws IOException, DatabricksRestException; + /** + * Lists all objects in a specific db workspace path. + * https://docs.databricks.com/api/latest/workspace.html#list + * @param path the db workspace path + * @return the array of objects + * @throws IOException any other errors + * @throws DatabricksRestException specific db exceptions + */ ObjectInfoDTO[] listStatus(String path) throws IOException, DatabricksRestException; + /** + * Makes a path (and any parent paths if they do not exist) on db workspace. + * https://docs.databricks.com/api/latest/workspace.html#mkdirs + * @param path the db workspace path to create + * @throws IOException any other errors + * @throws DatabricksRestException any specific db exceptions + */ void mkdirs(String path) throws IOException, DatabricksRestException; } diff --git a/src/main/java/com/edmunds/rest/databricks/service/WorkspaceServiceImpl.java b/src/main/java/com/edmunds/rest/databricks/service/WorkspaceServiceImpl.java index 0c176f8..46ceec3 100644 --- a/src/main/java/com/edmunds/rest/databricks/service/WorkspaceServiceImpl.java +++ b/src/main/java/com/edmunds/rest/databricks/service/WorkspaceServiceImpl.java @@ -17,11 +17,11 @@ package com.edmunds.rest.databricks.service; import com.edmunds.rest.databricks.DTO.ObjectInfoDTO; -import com.edmunds.rest.databricks.DatabricksRestClient; import com.edmunds.rest.databricks.DatabricksRestException; import com.edmunds.rest.databricks.RequestMethod; import com.edmunds.rest.databricks.request.ExportWorkspaceRequest; import com.edmunds.rest.databricks.request.ImportWorkspaceRequest; +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; import com.fasterxml.jackson.core.type.TypeReference; import java.io.IOException; import java.util.HashMap; diff --git a/src/test/java/com/edmunds/rest/databricks/DatabricksRestClientTest.java b/src/test/java/com/edmunds/rest/databricks/DatabricksRestClientTest.java index 305d55a..94fc3d9 100644 --- a/src/test/java/com/edmunds/rest/databricks/DatabricksRestClientTest.java +++ b/src/test/java/com/edmunds/rest/databricks/DatabricksRestClientTest.java @@ -16,30 +16,38 @@ package com.edmunds.rest.databricks; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.edmunds.rest.databricks.fixtures.DatabricksFixtures; +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; import com.fasterxml.jackson.databind.ObjectMapper; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - import java.io.IOException; import java.util.HashMap; import java.util.Map; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; public class DatabricksRestClientTest { - private DatabricksRestClient client; private ObjectMapper mapper; @BeforeClass public void setUpOnce() throws IOException { - client = DatabricksFixtures.getDatabricksRestClient(); mapper = new ObjectMapper(); } - @Test - public void getRequest_whenCalled_returnsValidResponse() + @DataProvider(name = "Clients") + public Object[][] getClients() throws IOException { + return 
new Object[][]{ + {DatabricksFixtures.getDatabricksRestClient()}, + {DatabricksFixtures.createTokenAuthRestClient()}, + {DatabricksFixtures.createDatabricks425Client()} + }; + } + + @Test(dataProvider = "Clients") + public void getRequest_whenCalled_returnsValidResponse(DatabricksRestClient client) throws IOException, DatabricksRestException { byte[] responseBody = client.performQuery(RequestMethod.GET, "/libraries/all-cluster-statuses", null); Map response = this.mapper.readValue(responseBody, Map.class); @@ -48,8 +56,8 @@ public void getRequest_whenCalled_returnsValidResponse() } // I ignored this test as this run id doesn't appear to be available anymore. - @Test(enabled = false) - public void getRequest_whenCalledWithData_returnsValidResponse() + @Test(enabled = false, dataProvider = "Clients") + public void getRequest_whenCalledWithData_returnsValidResponse(DatabricksRestClient client) throws IOException, DatabricksRestException { int id = 1452843; Map data = new HashMap<>(); @@ -62,18 +70,20 @@ public void getRequest_whenCalledWithData_returnsValidResponse() assertEquals(result, result); } - @Test(expectedExceptions = DatabricksRestException.class, expectedExceptionsMessageRegExp = ".*ENDPOINT_NOT_FOUND" + + @Test(dataProvider = "Clients", expectedExceptions = DatabricksRestException.class, expectedExceptionsMessageRegExp = + ".*ENDPOINT_NOT_FOUND" + ".*") - public void performQuery_withInvalidPath_throwsDatabricksRestException() throws Exception { + public void performQuery_withInvalidPath_throwsDatabricksRestException( + DatabricksRestClient client) throws Exception { client.performQuery(RequestMethod.GET, "/fake_path", null); } - @Test(expectedExceptions = DatabricksRestException.class, expectedExceptionsMessageRegExp = ".*INVALID_PARAMETER_VALUE.*") - public void performQuery_withInvalidParameter_throwsDatabricksRestException() throws Exception { + @Test(dataProvider = "Clients", expectedExceptions = DatabricksRestException.class, expectedExceptionsMessageRegExp = ".*INVALID_PARAMETER_VALUE.*") + public void performQuery_withInvalidParameter_throwsDatabricksRestException( + DatabricksRestClient client) throws Exception { client.performQuery(RequestMethod.GET, "/clusters/get", null); } - @Test public void performQuery_retry_when_404() throws Exception { DatabricksRestClient notFoundClient = DatabricksFixtures.createDatabricksRestClientWithRetryCode(404); @@ -97,5 +107,4 @@ public void performQuery_client425_retry_when_404() throws Exception { assertEquals(retryStrategy.getExecuteCount(), 2); } } - } diff --git a/src/test/java/com/edmunds/rest/databricks/DatabricksServiceFactoryTest.java b/src/test/java/com/edmunds/rest/databricks/DatabricksServiceFactoryTest.java index bb11a09..eb7c86d 100644 --- a/src/test/java/com/edmunds/rest/databricks/DatabricksServiceFactoryTest.java +++ b/src/test/java/com/edmunds/rest/databricks/DatabricksServiceFactoryTest.java @@ -16,28 +16,42 @@ package com.edmunds.rest.databricks; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - import static org.testng.Assert.assertNotNull; +import org.testng.annotations.Test; + /** * Created by shong on 7/21/16. 
*/ public class DatabricksServiceFactoryTest { - private DatabricksServiceFactory factory; - @BeforeClass - public void setUpOnce() { - factory = new DatabricksServiceFactory("", "", ""); + @Test + public void testBuilder_withTokens() { + DatabricksServiceFactory serviceFactory = + DatabricksServiceFactory.Builder + .createServiceFactoryWithTokenAuthentication("myToken", "myHost") + .withMaxRetries(5) + .withRetryInterval(10000L) + .build(); + assertNotNull(serviceFactory.getJobService()); + assertNotNull(serviceFactory.getLibraryService()); + assertNotNull(serviceFactory.getDbfsService()); + assertNotNull(serviceFactory.getJobService()); + assertNotNull(serviceFactory.getWorkspaceService()); } @Test - public void getService_whenCalled_returnsANotNullObject() { - assertNotNull(factory.getClusterService()); - assertNotNull(factory.getLibraryService()); - assertNotNull(factory.getDbfsService()); - assertNotNull(factory.getJobService()); - assertNotNull(factory.getWorkspaceService()); + public void testBuilder_withPassword() { + DatabricksServiceFactory serviceFactory = + DatabricksServiceFactory.Builder + .createServiceFactoryWithUserPasswordAuthentication("myUser", "myPassword", "myHost") + .withMaxRetries(5) + .withRetryInterval(10000L) + .build(); + assertNotNull(serviceFactory.getJobService()); + assertNotNull(serviceFactory.getLibraryService()); + assertNotNull(serviceFactory.getDbfsService()); + assertNotNull(serviceFactory.getJobService()); + assertNotNull(serviceFactory.getWorkspaceService()); } } diff --git a/src/test/java/com/edmunds/rest/databricks/fixtures/DatabricksFixtures.java b/src/test/java/com/edmunds/rest/databricks/fixtures/DatabricksFixtures.java index 455d88c..1f7e992 100644 --- a/src/test/java/com/edmunds/rest/databricks/fixtures/DatabricksFixtures.java +++ b/src/test/java/com/edmunds/rest/databricks/fixtures/DatabricksFixtures.java @@ -16,12 +16,11 @@ package com.edmunds.rest.databricks.fixtures; -import com.edmunds.rest.databricks.DatabricksRestClient; -import com.edmunds.rest.databricks.DatabricksRestClientImpl; -import com.edmunds.rest.databricks.DatabricksRestClientImpl425; import com.edmunds.rest.databricks.DatabricksServiceFactory; import com.edmunds.rest.databricks.HttpServiceUnavailableRetryStrategy; - +import com.edmunds.rest.databricks.restclient.DatabricksRestClient; +import com.edmunds.rest.databricks.restclient.DatabricksRestClientImpl; +import com.edmunds.rest.databricks.restclient.DatabricksRestClientImpl425; import java.io.IOException; import java.lang.reflect.Field; import java.util.Set; @@ -34,6 +33,7 @@ public class DatabricksFixtures { public static String USERNAME; public static String PASSWORD; public static String HOSTNAME; + public static String TOKEN; private static DatabricksRestClient client; private static DatabricksServiceFactory factory; @@ -41,11 +41,29 @@ public class DatabricksFixtures { USERNAME = System.getenv("DB_USER"); PASSWORD = System.getenv("DB_PASSWORD"); HOSTNAME = System.getenv("DB_URL"); + TOKEN = System.getenv("DB_TOKEN"); } public static DatabricksRestClient getDatabricksRestClient() throws IOException { if (client == null) { - client = new DatabricksRestClientImpl(USERNAME, PASSWORD, HOSTNAME, API_VERSION, 1, 10); + client = DatabricksRestClientImpl + .createClientWithUserPassword(USERNAME, PASSWORD, HOSTNAME, API_VERSION, 1, 10); + } + return client; + } + + public static DatabricksRestClient createTokenAuthRestClient() { + if (client == null) { + client = DatabricksRestClientImpl + 
.createClientWithTokenAuthentication(TOKEN, HOSTNAME, API_VERSION, 1, 10); + } + return client; + } + + public static DatabricksRestClient createDatabricks425Client() { + if (client == null) { + client = DatabricksRestClientImpl425 + .createClientWithUserPassword(USERNAME, PASSWORD, HOSTNAME, API_VERSION, 1, 10); } return client; } @@ -59,7 +77,8 @@ public static DatabricksRestClient getDatabricksRestClient() throws IOException */ public static DatabricksRestClient createDatabricksRestClientWithRetryCode(int httpStatusCode) throws Exception { - DatabricksRestClient databricksClient = new DatabricksRestClientImpl(USERNAME, PASSWORD, HOSTNAME, + DatabricksRestClient databricksClient = DatabricksRestClientImpl + .createClientWithUserPassword(USERNAME, PASSWORD, HOSTNAME, API_VERSION, 1, 10); addHttpStatus(databricksClient, httpStatusCode); @@ -69,7 +88,8 @@ public static DatabricksRestClient createDatabricksRestClientWithRetryCode(int h public static DatabricksRestClient createDatabricksRestClient425WithRetryCode(int httpStatusCode) throws Exception { - DatabricksRestClientImpl425 databricksClient = new DatabricksRestClientImpl425(USERNAME, PASSWORD, HOSTNAME, + DatabricksRestClientImpl425 databricksClient = DatabricksRestClientImpl425 + .createClientWithUserPassword(USERNAME, PASSWORD, HOSTNAME, API_VERSION, 1, 10); addHttpStatus(databricksClient, httpStatusCode); @@ -104,7 +124,8 @@ public static HttpServiceUnavailableRetryStrategy getHttpServiceUnavailableRetry public static DatabricksServiceFactory getDatabricksServiceFactory() { if (factory == null) { - factory = new DatabricksServiceFactory(USERNAME, PASSWORD, HOSTNAME); + factory = DatabricksServiceFactory.Builder + .createServiceFactoryWithUserPasswordAuthentication(USERNAME, PASSWORD, HOSTNAME).build(); } return factory;
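Taken together, the construction pattern this change migrates the fixtures, tests and `JobRunner` toward is: deprecated constructors out, static factory methods and the `Builder` in. The following is a minimal sketch only (class name `MigrationSketch` and all argument values are placeholders for illustration):

```java
import com.edmunds.rest.databricks.DatabricksServiceFactory;
import com.edmunds.rest.databricks.restclient.DatabricksRestClient;
import com.edmunds.rest.databricks.restclient.DatabricksRestClientImpl;

public class MigrationSketch {

  public static void main(String[] args) {
    // Old style (now @Deprecated): calling new DatabricksServiceFactory(user, password, host)
    // or constructing DatabricksRestClientImpl directly.

    // New style for the high-level factory: the Builder.
    DatabricksServiceFactory factory =
        DatabricksServiceFactory.Builder
            .createServiceFactoryWithUserPasswordAuthentication("myUser", "myPassword", "myHost")
            .withMaxRetries(1)
            .withRetryInterval(10L)
            .build();

    // New style for the low-level rest client: static factory methods,
    // as used by DatabricksFixtures above (API version "2.0", maxRetry 1, retryInterval 10).
    DatabricksRestClient client =
        DatabricksRestClientImpl.createClientWithUserPassword(
            "myUser", "myPassword", "myHost", "2.0", 1, 10);

    System.out.println(factory.getJobService() != null && client != null);
  }
}
```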