Merge pull request #140 from fauna/release-0.2.0-B1

Release 0.2.0-B1 Fauna v10 JVM Beta!

findgriffin authored Sep 11, 2024
2 parents 4eda6b1 + 8e26f2f commit 4288179
Showing 224 changed files with 8,198 additions and 4,092 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/gradle-test.yml
@@ -24,7 +24,7 @@ jobs:
distribution: corretto

- name: Run Gradle test
run: ./gradlew test
run: ./gradlew test -i

- name: Run Gradle javadoc
run: ./gradlew javadoc
1 change: 1 addition & 0 deletions .gitignore
@@ -1,5 +1,6 @@
# Compiled class file
*.class
src/main/resources/*

# Log file
*.log
231 changes: 201 additions & 30 deletions README.md
@@ -1,9 +1,9 @@
# The Official Java Driver for [Fauna](https://fauna.com). (alpha)
# The Official JVM Driver for [Fauna](https://fauna.com) (beta)

> [!CAUTION]
> This driver is currently in alpha and should not be used in production.
> This driver is currently in beta and should not be used in production.
The Fauna Java driver is a lightweight, open-source wrapper for Fauna's [HTTP
The Fauna JVM driver is a lightweight, open-source wrapper for Fauna's [HTTP
API](https://docs.fauna.com/fauna/current/reference/http/reference/). You can
use the driver to run FQL queries and get results from a Java application.

@@ -20,9 +20,10 @@ versions of FQL. To query your databases with earlier API versions, use the
- Java 11 or later


## Javadocs
## API reference

API reference documentation is available in the [Javadocs](https://fauna.github.io/fauna-jvm/latest/).
API reference documentation for the driver is available at
https://fauna.github.io/fauna-jvm/. The docs are generated using Javadoc.


## Installation
@@ -74,31 +75,27 @@ package org.example;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

import com.fauna.annotation.FaunaField;
import com.fauna.annotation.FaunaObject;
import com.fauna.client.Fauna;
import com.fauna.client.FaunaClient;
import com.fauna.client.FaunaConfig;
import com.fauna.exception.FaunaException;
import com.fauna.query.builder.Query;
import com.fauna.response.QuerySuccess;
import com.fauna.serialization.generic.PageOf;
import com.fauna.types.Page;

import static com.fauna.codec.Generic.pageOf;
import static com.fauna.query.builder.Query.fql;


public class App {

// Define class for `Product` documents
// in expected results.
@FaunaObject
public static class Product {
@FaunaField(name = "name")
public String name;

@FaunaField(name = "description")
public String description;

@FaunaField(name = "price")
public double price;
public Integer price;
}

public static void main(String[] args) {
@@ -108,7 +105,7 @@ public class App {
FaunaClient client = Fauna.client();

// Compose a query.
Query query = Query.fql("""
Query query = fql("""
Product.sortedByPriceLowToHigh() {
name,
description,
@@ -136,29 +133,29 @@ public class App {
// Use `query()` to run a synchronous query.
// Synchronous queries block the current thread until the query completes.
// Accepts the query, expected result class, and a nullable set of query options.
QuerySuccess<Page<Product>> result = client.query(query, new PageOf<>(Product.class));
QuerySuccess<Page<Product>> result = client.query(query, pageOf(Product.class));
printResults(result.getData());
}

private static void runAsynchronousQuery(FaunaClient client, Query query) throws ExecutionException, InterruptedException {
// Use `asyncQuery()` to run an asynchronous, non-blocking query.
// Accepts the query, expected result class, and a nullable set of query options.
CompletableFuture<QuerySuccess<Page<Product>>> futureResult = client.asyncQuery(query, new PageOf<>(Product.class));
CompletableFuture<QuerySuccess<Page<Product>>> futureResult = client.asyncQuery(query, pageOf(Product.class));

QuerySuccess<Page<Product>> result = futureResult.get();
printResults(result.getData());
}

// Iterate through the products in the page.
private static void printResults(Page<Product> page) {
for (Product product : page.data()) {
for (Product product : page.getData()) {
System.out.println("Name: " + product.name);
System.out.println("Description: " + product.description);
System.out.println("Price: " + product.price);
System.out.println("--------");
}
// Print the `after` cursor to paginate through results.
System.out.println("After: " + page.after());
System.out.println("After: " + page.getAfter());
}
}
```
@@ -219,24 +216,50 @@ Use `fql` templates to compose FQL queries. To run the query, pass the template
and an expected result class to `query()` or `asyncQuery()`:

```java
Query query = Query.fql("Product.sortedByPriceLowToHigh()");
QuerySuccess<Page<Product>> result = client.query(query, new PageOf<>(Product.class));
Query query = fql("Product.sortedByPriceLowToHigh()");
QuerySuccess<Page<Product>> result = client.query(query, pageOf(Product.class));
```

You can also pass a nullable set of [query options](#query-options) to `query()`
or `asyncQuery()`. These options control how the query runs in Fauna.


### Define a result class
### Define a custom class for your data

You can use the `com.fauna.annotation` package to define a result class for a
Fauna document. The package provides annotations like `@FaunaObject` and
`@FaunaField` to map Fauna documents to Java classes and fields.

Use the `com.fauna.serialization` package to handle deserialization for
generics, such as `PageOf`, `ListOf`, and `MapOf`.
Use annotations to map a Java class to a Fauna document or object shape:

```java
import com.fauna.annotation.FaunaField;
import com.fauna.annotation.FaunaId;

class Person {

    @FaunaId
    private String id;

    private String firstName;

    @FaunaField(name = "dob")
    private String dateOfBirth;
}
```

You can use the `com.fauna.annotation` package to modify encoding and decoding of
specific fields in classes used as arguments and results of queries.
* `@FaunaId`: Should only be used once per class and be associated with a field named `id` that represents the Fauna document ID. It's not encoded unless the `isClientGenerated` flag is `true`.
* `@FaunaTs`: Should only be used once per class and be associated with a field named `ts` that represents the timestamp of a document. It's not encoded.
* `@FaunaColl`: Typically goes unmodeled. Should only be used once per class and be associated with a field named `coll` that represents the collection field of a document. It will never be encoded.
* `@FaunaField`: Can be associated with any field to override its name in Fauna.
* `@FaunaIgnore`: Can be used to ignore fields during encoding and decoding.
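
As a rough illustration of how these annotations combine, here is a hedged sketch that extends the `Person` class above. The class name, the extra fields, and their Java types (for example, `Instant` for `ts` and `String` for `coll`) are illustrative assumptions, not confirmed requirements of the driver; check the Javadocs for the supported types.

```java
import java.time.Instant;

import com.fauna.annotation.FaunaColl;
import com.fauna.annotation.FaunaField;
import com.fauna.annotation.FaunaId;
import com.fauna.annotation.FaunaIgnore;
import com.fauna.annotation.FaunaTs;

class AnnotatedPerson {

    // Maps to the Fauna document ID; not encoded unless client-generated IDs are used.
    @FaunaId
    private String id;

    // Maps to the document timestamp; never encoded. (`Instant` is an assumed type.)
    @FaunaTs
    private Instant ts;

    // Maps to the document's collection; never encoded. (`String` is an assumed type.)
    @FaunaColl
    private String coll;

    // Stored in Fauna under the name `dob`.
    @FaunaField(name = "dob")
    private String dateOfBirth;

    // Excluded from both encoding and decoding.
    @FaunaIgnore
    private String internalNote;
}
```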

Use classes in the `com.fauna.codec` package to handle type erasure when the top-level result
of a query is a generic, including:
* `PageOf<T>` where `T` is the element type.
* `ListOf<T>` where `T` is the element type.
* `MapOf<T>` where `T` is the value type.
* `OptionalOf<T>` where `T` is the value type.
* `NullableDocumentOf<T>` where `T` is the value type. This is specifically for cases when you return a Fauna document that may be null and want to receive a concrete `NullDocument<T>` or `NonNullDocument<T>` instead of catching a `NullDocumentException`.
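
For example, if the top-level result of a query is a list or a map rather than a page, you can pass the matching codec helper as the expected result type. The sketch below assumes that `com.fauna.codec.Generic` exposes `listOf(...)` and `mapOf(...)` factories alongside the `pageOf(...)` helper used earlier in this README; verify the exact names against the Javadocs.

```java
import java.util.List;
import java.util.Map;

import com.fauna.client.Fauna;
import com.fauna.client.FaunaClient;
import com.fauna.query.builder.Query;
import com.fauna.response.QuerySuccess;

import static com.fauna.codec.Generic.listOf;
import static com.fauna.codec.Generic.mapOf;
import static com.fauna.query.builder.Query.fql;

public class GenericResults {
    public static void main(String[] args) {
        FaunaClient client = Fauna.client();

        // The top-level result is an array of integers, so decode it with `listOf`.
        Query listQuery = fql("[1, 2, 3]");
        QuerySuccess<List<Integer>> listResult = client.query(listQuery, listOf(Integer.class));
        System.out.println(listResult.getData());

        // The top-level result is an object with integer values, so decode it with `mapOf`.
        Query mapQuery = fql("{ cup: 8, bowl: 12 }");
        QuerySuccess<Map<String, Integer>> mapResult = client.query(mapQuery, mapOf(Integer.class));
        System.out.println(mapResult.getData());
    }
}
```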

### Variable interpolation

@@ -275,7 +298,7 @@ import com.fauna.client.PageIterator;
public class App {
public static void main(String[] args) {
FaunaClient client = Fauna.client();

// `paginate()` will make an async request to Fauna.
PageIterator<Product> iter1 = client.paginate(fql("Product.all()"), Product.class);

@@ -285,9 +308,9 @@ public class App {
List<Product> pageData = page.data();
// Do something with your data.
}

PageIterator<Product> iter2 = client.paginate(fql("Product.all()"), Product.class);

// Use the `flatten()` on PageIterator to iterate over every item in a set.
Iterator<Product> productIter = iter2.flatten();
List<Product> products = new ArrayList<>();
@@ -406,3 +429,151 @@ QueryOptions options = QueryOptions.builder()

QuerySuccess result = client.query(query, String.class, options);
```
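
For context, a complete call with options might look like the sketch below. This is only a hedged illustration: the `com.fauna.query.QueryOptions` import path and the `timeout(...)`/`build()` builder methods are assumptions based on typical builder APIs, so confirm them against the driver's Javadocs before relying on them.

```java
import java.time.Duration;

import com.fauna.client.Fauna;
import com.fauna.client.FaunaClient;
import com.fauna.query.QueryOptions;
import com.fauna.query.builder.Query;
import com.fauna.response.QuerySuccess;

import static com.fauna.query.builder.Query.fql;

public class QueryWithOptions {
    public static void main(String[] args) {
        FaunaClient client = Fauna.client();
        Query query = fql("\"Hello, Fauna!\"");

        // Build a nullable set of options. `timeout(...)` is an assumed builder method.
        QueryOptions options = QueryOptions.builder()
                .timeout(Duration.ofSeconds(5))
                .build();

        // Pass the options as the third argument.
        QuerySuccess<String> result = client.query(query, String.class, options);
        System.out.println(result.getData());
    }
}
```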

## Event streaming

The driver supports [event streaming](https://docs.fauna.com/fauna/current/learn/streaming).

To get a stream token, append
[`toStream()`](https://docs.fauna.com/fauna/current/reference/reference/schema_entities/set/tostream)
or
[`changesOn()`](https://docs.fauna.com/fauna/current/reference/reference/schema_entities/set/changeson)
to a set from a [supported
source](https://docs.fauna.com/fauna/current/reference/streaming_reference/#supported-sources).

To start and subscribe to the stream, use a stream token to create a
`StreamRequest` and pass the `StreamRequest` to `stream()` or `asyncStream()`:

```java
// Get a stream token.
Query query = fql("Product.all().toStream() { name, stock }");
QuerySuccess<StreamTokenResponse> tokenResponse = client.query(query, StreamTokenResponse.class);
String streamToken = tokenResponse.getData().getToken();

// Create a StreamRequest.
StreamRequest request = new StreamRequest(streamToken);

// Use stream() when you want to ensure the stream is ready before proceeding
// with other operations, or when working in a synchronous context.
FaunaStream<Product> stream = client.stream(request, Product.class);

// Use asyncStream() when you want to start the stream operation without blocking,
// which is useful in asynchronous applications or when you need to perform other
// tasks while waiting for the stream to be established.
CompletableFuture<FaunaStream<Product>> futureStream = client.asyncStream(request, Product.class);
```

Alternatively, you can pass an FQL query that returns a stream token to `stream()` or
`asyncStream()`:

```java
Query query = fql("Product.all().toStream() { name, stock }");
// Create and subscribe to a stream in one step.
// stream() example:
FaunaStream<Product> stream = client.stream(query, Product.class);
// asyncStream() example:
CompletableFuture<FaunaStream<Product>> futureStream = client.asyncStream(query, Product.class);
```

### Create a subscriber class

`stream()` and `asyncStream()` return a `FaunaStream` publisher that lets you
handle events as they arrive. Create a class that implements the
`Flow.Subscriber` interface to process events:

```java
package org.example;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Flow;
import java.util.concurrent.atomic.AtomicInteger;

import com.fauna.client.Fauna;
import com.fauna.client.FaunaClient;
import com.fauna.client.FaunaStream;
import com.fauna.exception.FaunaException;
import static com.fauna.query.builder.Query.fql;
import com.fauna.response.StreamEvent;

// Import the Product class for event data.
import org.example.Product;

public class App {
public static void main(String[] args) throws InterruptedException {
try {
FaunaClient client = Fauna.client();

// Create a stream of all products. Project the name and stock.
FaunaStream<Product> stream = client.stream(fql("Product.all().toStream() { name, stock }"), Product.class);

// Create a subscriber to handle stream events.
ProductSubscriber subscriber = new ProductSubscriber();
stream.subscribe(subscriber);

// Wait for the subscriber to complete.
subscriber.awaitCompletion();
} catch (FaunaException e) {
System.err.println("Fauna error occurred: " + e.getMessage());
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
}
}

static class ProductSubscriber implements Flow.Subscriber<StreamEvent<Product>> {
private final AtomicInteger eventCount = new AtomicInteger(0);
private Flow.Subscription subscription;
private final int maxEvents;
private final CountDownLatch completionLatch = new CountDownLatch(1);

public ProductSubscriber() {
// Stream closes after 3 events.
this.maxEvents = 3;
}

@Override
public void onSubscribe(Flow.Subscription subscription) {
this.subscription = subscription;
subscription.request(1);
}

@Override
public void onNext(StreamEvent<Product> event) {
// Handle each event...
int count = eventCount.incrementAndGet();
System.out.println("Received event " + count + ":");
System.out.println(" Cursor: " + event.getCursor());
System.out.println(" Timestamp: " + event.getTimestamp());
System.out.println(" Data: " + event.getData().orElse(null));

if (count >= maxEvents) {
System.out.println("Closing stream after " + maxEvents + " events");
subscription.cancel();
completionLatch.countDown();
} else {
subscription.request(1);
}
}

@Override
public void onError(Throwable throwable) {
System.err.println("Error in stream: " + throwable.getMessage());
completionLatch.countDown();
}

@Override
public void onComplete() {
System.out.println("Stream completed.");
completionLatch.countDown();
}

public int getEventCount() {
return eventCount.get();
}

public void awaitCompletion() throws InterruptedException {
completionLatch.await();
}
}
}
```
6 changes: 4 additions & 2 deletions build.gradle
@@ -27,12 +27,14 @@ dependencies {
implementation "com.fasterxml.jackson.core:jackson-core:${jacksonVersion}"
implementation "com.fasterxml.jackson.core:jackson-databind:${jacksonVersion}"

testImplementation 'junit:junit:4.13.2'
testImplementation "org.junit.jupiter:junit-jupiter-api:${junitVersion}"
testImplementation "org.junit.jupiter:junit-jupiter-params:${junitVersion}"
testImplementation 'junit:junit:4.13.2'
testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:${junitVersion}"
testImplementation "org.mockito:mockito-core:${mockitoVersion}"
testImplementation "org.mockito:mockito-junit-jupiter:${mockitoVersion}"
testImplementation "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${jacksonVersion}"

testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:${junitVersion}"
}

mavenPublishing {
1 change: 1 addition & 0 deletions concourse/scripts/body_gtm.dat
@@ -0,0 +1 @@
<!-- Google Tag Manager (noscript) --><noscript><iframe src="https:\/\/www.googletagmanager.com\/ns.html?id=GTM-TGTQJ7M" height="0" width="0" style="display:none;visibility:hidden"><\/iframe><\/noscript><!-- End Google Tag Manager (noscript) -->
1 change: 1 addition & 0 deletions concourse/scripts/head_gtm.dat
@@ -0,0 +1 @@
<!-- Google Tag Manager --><script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start': new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src='https:\/\/www.googletagmanager.com\/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);})(window,document,'script','dataLayer','GTM-TGTQJ7M');<\/script><!-- End Google Tag Manager -->
14 changes: 8 additions & 6 deletions concourse/scripts/publish-docs.sh
@@ -35,15 +35,17 @@ fi

cp -R "../fauna-jvm-repository/build/docs/javadoc" "$PACKAGE_VERSION"

# echo "Adding google manager tag to head..."
echo "Adding google manager tag to head..."

# HEAD_GTM=$(cat ../fauna-jvm-repository/concourse/scripts/head_gtm.dat)
# sed -i '' "0,/<\/title>/{s/<\/title>/<\/title>${HEAD_GTM}/}" "$PACKAGE_VERSION/index.html"
HEAD_GTM=$(cat ../fauna-jvm-repository/concourse/scripts/head_gtm.dat)
sed -i.bak "0,/<\/title>/{s/<\/title>/<\/title>${HEAD_GTM}/}" ./$PACKAGE_VERSION/index.html

# echo "Adding google manager tag to body..."
echo "Adding google manager tag to body..."

# BODY_GTM=$(cat ../fauna-jvm-repository/concourse/scripts/body_gtm.dat)
# sed -i '' "0,/<body>/{s/<body>/<body>${BODY_GTM}/}" "$PACKAGE_VERSION/index.html"
BODY_GTM=$(cat ../fauna-jvm-repository/concourse/scripts/body_gtm.dat)
sed -i.bak "0,/<body>/{s/<body>/<body>${BODY_GTM}/}" ./$PACKAGE_VERSION/index.html

rm ./$PACKAGE_VERSION/index.html.bak

echo "Updating 'latest' symlink to point to $PACKAGE_VERSION"
ln -sfn "$PACKAGE_VERSION" latest