Skip to content

Commit

Permalink
Adapt S3 AWS error handling; see https://imagesc.zulipchat.com/#narro…
Browse files Browse the repository at this point in the history
  • Loading branch information
tischi committed Nov 24, 2023
1 parent 9425b51 commit a32588f
Show file tree
Hide file tree
Showing 4 changed files with 120 additions and 94 deletions.
43 changes: 26 additions & 17 deletions src/main/java/org/embl/mobie/io/SpimDataOpener.java
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@

public class SpimDataOpener {

public static final String ERROR_WHILE_TRYING_TO_READ_SPIM_DATA = "Error while trying to read spimData";
public static final String ERROR_WHILE_TRYING_TO_READ_SPIM_DATA = "Error while trying to read ";

// Explicit no-arg constructor; the opener is stateless.
public SpimDataOpener() {
}
Expand Down Expand Up @@ -192,67 +192,76 @@ private SpimData openBdvXml(String path) throws SpimDataException {
try {
InputStream stream = IOHelper.getInputStream(path);
return new CustomXmlIoSpimData().loadFromStream(stream, path);
} catch (SpimDataException | IOException e) {
throw new SpimDataException(ERROR_WHILE_TRYING_TO_READ_SPIM_DATA + e.getMessage());
} catch (Exception e) {
System.err.println("Error opening " + path);
throw new RuntimeException(e);
}
}

private SpimData openBdvN5(String path, SharedQueue queue) throws SpimDataException {
try {
return N5Opener.openFile(path, queue);
} catch (IOException e) {
throw new SpimDataException(ERROR_WHILE_TRYING_TO_READ_SPIM_DATA + e.getMessage());
} catch (Exception e) {
System.err.println("Error opening " + path);
throw new RuntimeException(e);
}
}

private SpimData openBdvN5S3(String path, SharedQueue queue) throws SpimDataException {
try {
return N5S3Opener.readURL(path, queue);
} catch (IOException e) {
throw new SpimDataException(ERROR_WHILE_TRYING_TO_READ_SPIM_DATA + e.getMessage());
} catch (Exception e) {
System.err.println("Error opening " + path);
throw new RuntimeException(e);
}
}

private SpimData openOmeZarr(String path) throws SpimDataException {
try {
return OMEZarrOpener.openFile(path);
} catch (IOException e) {
throw new SpimDataException(ERROR_WHILE_TRYING_TO_READ_SPIM_DATA + e.getMessage());
} catch (Exception e) {
System.err.println("Error opening " + path);
throw new RuntimeException(e);
}
}

private SpimData openOmeZarr(String path, SharedQueue sharedQueue) throws SpimDataException {
try {
return OMEZarrOpener.openFile(path, sharedQueue);
} catch (IOException e) {
throw new SpimDataException(ERROR_WHILE_TRYING_TO_READ_SPIM_DATA + e.getMessage());
} catch (Exception e) {
System.err.println("Error opening " + path);
throw new RuntimeException(e);
}
}

private SpimData openOmeZarrS3(String path) throws SpimDataException {
try {
return OMEZarrS3Opener.readURL(path);
} catch (IOException e) {
throw new SpimDataException(ERROR_WHILE_TRYING_TO_READ_SPIM_DATA + e.getMessage());
} catch (Exception e) {
System.err.println("Error opening " + path);
throw new RuntimeException(e);
}
}

private SpimData openOmeZarrS3(String path, SharedQueue sharedQueue) throws SpimDataException {
try {
return OMEZarrS3Opener.readURL(path, sharedQueue);
} catch (IOException e) {
throw new SpimDataException(ERROR_WHILE_TRYING_TO_READ_SPIM_DATA + e.getMessage());
} catch (Exception e) {
System.err.println("Error opening " + path);
throw new RuntimeException(e);
}
}

private SpimData openOpenOrganelleS3(String path) throws SpimDataException {
try {
return OpenOrganelleS3Opener.readURL(path);
} catch (IOException e) {
throw new SpimDataException(ERROR_WHILE_TRYING_TO_READ_SPIM_DATA + e.getMessage());
} catch (Exception e) {
System.err.println("Error opening " + path);
throw new RuntimeException(e);
}
}

@NotNull
private SpimData openBdvOmeZarrS3(String path, SharedQueue queue) {
//TODO: finish bug fixing
try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ExecutionException;

import org.embl.mobie.io.ome.zarr.util.N5ZarrImageReader;
import org.embl.mobie.io.ome.zarr.util.N5ZarrImageReaderHelper;
Expand Down Expand Up @@ -218,23 +219,6 @@ public boolean datasetExists(final String pathName) throws IOException {
return readJson(path) != null;
}

/**
* CHANGE: rename to not overwrite the AWS list objects version
*
* @return false if the group or dataset does not exist, or if the
* attempt to access it fails with an I/O error
*/
// @Override
// Existence probe for a zarr group or dataset.
// NOTE(review): deliberately swallows IOException and reports "does not
// exist" — network/S3 hiccups are indistinguishable from absence here.
public boolean zarrExists(final String pathName) {
try {
return groupExists(pathName) || datasetExists(pathName);
} catch (final IOException e) {
// Best-effort: log the failure and treat the path as absent.
e.printStackTrace();
return false;
}
}


/**
* If {@link #mapN5DatasetAttributes} is set, dataset attributes will
* override attributes with the same key.
Expand All @@ -254,9 +238,16 @@ public HashMap<String, JsonElement> getAttributes(final String pathName) throws
throw new IOException("Error while getting datasets dimensions", e);
}

if (mapN5DatasetAttributes && datasetExists(pathName)) {
final DatasetAttributes datasetAttributes = getZArrayAttributes(pathName).getDatasetAttributes();
n5ZarrImageReaderHelper.putAttributes(attributes, datasetAttributes);
if (mapN5DatasetAttributes) {
try
{
final DatasetAttributes datasetAttributes = getZArrayAttributes( pathName ).getDatasetAttributes();
n5ZarrImageReaderHelper.putAttributes( attributes, datasetAttributes );
}
catch ( Exception e )
{
// no datasetAttributes found here
}
}
return attributes;
}
Expand Down Expand Up @@ -287,11 +278,8 @@ public DataBlock<?> readBlock(
try (final InputStream in = this.readS3Object(dataBlockKey)) {
return readBlock(in, zarrDatasetAttributes, gridPosition);
}
} catch (AmazonS3Exception ase) {
if ("NoSuchKey".equals(ase.getErrorCode())) {
return null;
}
throw ase;
} catch (Exception e) {
return null;
}
}

Expand All @@ -302,14 +290,11 @@ public DataBlock<?> readBlock(
* @param objectPath
* @return null if the object does not exist, otherwise the loaded attributes.
*/
public HashMap<String, JsonElement> readJson(String objectPath) throws IOException {
public HashMap<String, JsonElement> readJson(String objectPath) {
try (final InputStream in = this.readS3Object(objectPath)) {
return GsonAttributesParser.readAttributes(new InputStreamReader(in), gson);
} catch (AmazonS3Exception ase) {
if (ase.getErrorCode().equals("NoSuchKey"))
return null;
else
throw ase;
} catch (Exception e) {
return null;
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -528,7 +528,6 @@ public void setAttributes(
gson));

if (mapN5DatasetAttributes && datasetExists(pathName)) {

attributes = new HashMap<>(attributes);
ZArrayAttributes zArrayAttributes = getZArrayAttributes(pathName);
long[] shape;
Expand Down
123 changes: 78 additions & 45 deletions src/main/java/org/embl/mobie/io/util/S3Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -55,60 +55,93 @@

public abstract class S3Utils {
// Optional explicit credentials: [0] = access key, [1] = secret key; null if unset.
private static String[] s3AccessAndSecretKey;
// If true, fall back to the AWS default credentials provider chain.
private static boolean useCredentialsChain;

/**
 * Sets explicit S3 credentials to be used by {@link #getS3Client}.
 *
 * @param s3AccessAndSecretKey array of {accessKey, secretKey}, or null to clear
 */
public static void setS3AccessAndSecretKey(String[] s3AccessAndSecretKey) {
    // Defensive copy: do not keep a reference to the caller's mutable array.
    S3Utils.s3AccessAndSecretKey =
            s3AccessAndSecretKey == null ? null : s3AccessAndSecretKey.clone();
}

public static AmazonS3 getS3Client(String endpoint, String region, String bucket) {

// look for credentials at common places
// on the client computer
/**
 * Misspelled legacy name; kept so existing callers keep compiling.
 *
 * @deprecated use {@link #useS3Credentials(boolean)} instead
 */
@Deprecated
public static void useS3Credenditals( boolean b ) {
    useS3Credentials( b );
}

/**
 * Controls whether {@link #getS3Client} looks up credentials via the AWS
 * default credentials provider chain (environment variables, ~/.aws, ...)
 * when no explicit access/secret key has been set.
 *
 * @param b true to enable the default credentials chain
 */
public static void useS3Credentials( boolean b ) {
    useCredentialsChain = b;
}

// TODO: the bucket could be removed here
public static AmazonS3 getS3Client(String endpoint, String region, String bucket)
{
final AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder.EndpointConfiguration(endpoint, region);

// first we create a client with anonymous credentials and see if we can access the bucket like this
AWSCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider(new AnonymousAWSCredentials());
AmazonS3 s3 = AmazonS3ClientBuilder
.standard()
.withPathStyleAccessEnabled(true)
.withEndpointConfiguration(endpointConfiguration)
.withCredentials(credentialsProvider)
.build();

// check if we can access the access
HeadBucketRequest headBucketRequest = new HeadBucketRequest(bucket);
try {
HeadBucketResult headBucketResult = s3.headBucket(headBucketRequest);
return s3;
} catch (AmazonServiceException e) {
switch (e.getStatusCode()) {
// if we get a 403 response (access forbidden), we try again with credentials
case HttpStatusCodes.STATUS_CODE_FORBIDDEN:
if (s3AccessAndSecretKey != null) {
// use the given credentials
final BasicAWSCredentials credentials = new BasicAWSCredentials(s3AccessAndSecretKey[0], s3AccessAndSecretKey[1]);
credentialsProvider = new AWSStaticCredentialsProvider(credentials);
} else {
// look for credentials at other places
credentialsProvider = new DefaultAWSCredentialsProviderChain();
checkCredentialsExistence(credentialsProvider);
}
s3 = AmazonS3ClientBuilder
.standard()
.withPathStyleAccessEnabled(true)
.withEndpointConfiguration(endpointConfiguration)
.withCredentials(credentialsProvider)
.build();
// check if we have access permissions now
try {
HeadBucketResult headBucketResult = s3.headBucket(headBucketRequest);
} catch (AmazonServiceException e2) {
throw e2;
}
return s3;
// otherwise the bucket does not exist or has been permanently moved; throw the exception
default:
throw e;
}
// Configure the credentials
AWSCredentialsProvider credentialsProvider;
if (s3AccessAndSecretKey != null)
{
// Use given credentials
final BasicAWSCredentials credentials = new BasicAWSCredentials(s3AccessAndSecretKey[0], s3AccessAndSecretKey[1]);
credentialsProvider = new AWSStaticCredentialsProvider(credentials);
}
else if (useCredentialsChain)
{
// Look for credentials
credentialsProvider = new DefaultAWSCredentialsProviderChain();
checkCredentialsExistence(credentialsProvider);
}
else
{
// Anonymous
credentialsProvider = new AWSStaticCredentialsProvider( new AnonymousAWSCredentials() );
}

// Create the access
AmazonS3 s3 = AmazonS3ClientBuilder
.standard()
.withPathStyleAccessEnabled( true )
.withEndpointConfiguration( endpointConfiguration )
.withCredentials( credentialsProvider )
.build();

return s3;
}
// check if we can access
// this does not work for all S3 implementations
// https://imagesc.zulipchat.com/#narrow/stream/328251-NGFF/topic/S3.20Zarr.20access.20issue
// HeadBucketRequest headBucketRequest = new HeadBucketRequest(bucket);
// try {
// HeadBucketResult headBucketResult = s3.headBucket(headBucketRequest);
// return s3;
// } catch (AmazonServiceException e) {
// switch (e.getStatusCode()) {
// // if we get a 403 response (access forbidden), we try again with credentials
// case HttpStatusCodes.STATUS_CODE_FORBIDDEN:
// if (s3AccessAndSecretKey != null) {
// // use the given credentials
// final BasicAWSCredentials credentials = new BasicAWSCredentials(s3AccessAndSecretKey[0], s3AccessAndSecretKey[1]);
// credentialsProvider = new AWSStaticCredentialsProvider(credentials);
// } else {
// // look for credentials at other places
// credentialsProvider = new DefaultAWSCredentialsProviderChain();
// checkCredentialsExistence(credentialsProvider);
// }
// s3 = AmazonS3ClientBuilder
// .standard()
// .withPathStyleAccessEnabled(true)
// .withEndpointConfiguration(endpointConfiguration)
// .withCredentials(credentialsProvider)
// .build();
// // check if we have access permissions now
// try {
// HeadBucketResult headBucketResult = s3.headBucket(headBucketRequest);
// } catch (AmazonServiceException e2) {
// throw e2;
// }
// return s3;
// // otherwise the bucket does not exist or has been permanently moved; throw the exception
// default:
// throw e;
// }
// }
// }

public static AmazonS3 getS3Client(String uri) {
final String endpoint = getEndpoint(uri);
Expand Down

0 comments on commit a32588f

Please sign in to comment.