diff --git a/.secrets.baseline b/.secrets.baseline
index c4f0be865..6b5adfe55 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -125,9 +125,9 @@
{
"path": "detect_secrets.filters.regex.should_exclude_file",
"pattern": [
- "\\.secrets..*",
"\\.git.*",
"\\.pre-commit-config\\.yaml",
+ "\\.secrets..*",
"target"
]
}
@@ -227,7 +227,7 @@
"filename": "src/site/markdown/operate/index.md",
"hashed_secret": "3a6d7aa49a8e4a2fe32a5cd0e53da9cb96bd8d29",
"is_verified": false,
- "line_number": 968,
+ "line_number": 991,
"is_secret": false
}
],
@@ -273,7 +273,7 @@
"filename": "src/site/xdoc/operate/errors.xml.vm",
"hashed_secret": "4fb813c304003b3813b35a85f05b7cb0c3994cc1",
"is_verified": false,
- "line_number": 173,
+ "line_number": 179,
"is_secret": false
}
],
@@ -388,17 +388,7 @@
"line_number": 76,
"is_secret": false
}
- ],
- "src/test/resources/riut/auth.txt": [
- {
- "type": "Secret Keyword",
- "filename": "src/test/resources/riut/auth.txt",
- "hashed_secret": "d033e22ae348aeb5660fc2140aec35850c4da997",
- "is_verified": false,
- "line_number": 3,
- "is_secret": false
- }
]
},
- "generated_at": "2024-05-22T15:27:52Z"
+ "generated_at": "2024-11-01T18:54:24Z"
}
diff --git a/pom.xml b/pom.xml
index e300d5879..f77bdcdff 100644
--- a/pom.xml
+++ b/pom.xml
@@ -367,19 +367,51 @@
       <version>5.4.1</version>
     </dependency>
     <dependency>
-      <groupId>org.opensearch.client</groupId>
-      <artifactId>opensearch-rest-client</artifactId>
-      <version>2.16.0</version>
+      <groupId>org.mp4parser</groupId>
+      <artifactId>isoparser</artifactId>
+      <version>1.9.56</version>
+    </dependency>
+    <dependency>
+      <groupId>gov.nasa.pds</groupId>
+      <artifactId>registry-common</artifactId>
+      <version>2.1.0-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>jakarta.xml.bind</groupId>
+      <artifactId>jakarta.xml.bind-api</artifactId>
+      <version>4.0.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.xml.bind</groupId>
+      <artifactId>jaxb-impl</artifactId>
+      <version>4.0.4</version>
+    </dependency>
+    <dependency>
+      <groupId>jakarta.activation</groupId>
+      <artifactId>jakarta.activation-api</artifactId>
+      <version>2.1.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents.client5</groupId>
+      <artifactId>httpclient5</artifactId>
+      <version>5.3.1</version>
     </dependency>
     <dependency>
       <groupId>org.opensearch.client</groupId>
-      <artifactId>opensearch-rest-high-level-client</artifactId>
-      <version>2.5.0</version>
+      <artifactId>opensearch-java</artifactId>
+      <version>2.13.0</version>
     </dependency>
     <dependency>
-      <groupId>org.mp4parser</groupId>
-      <artifactId>isoparser</artifactId>
-      <version>1.9.56</version>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>opensearch</artifactId>
+      <version>2.25.31</version>
+    </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>apache-client</artifactId>
+      <version>2.25.31</version>
     </dependency>
diff --git a/src/main/java/gov/nasa/pds/validate/ri/AuthInformation.java b/src/main/java/gov/nasa/pds/validate/ri/AuthInformation.java
index d64588a55..af73c1916 100644
--- a/src/main/java/gov/nasa/pds/validate/ri/AuthInformation.java
+++ b/src/main/java/gov/nasa/pds/validate/ri/AuthInformation.java
@@ -1,102 +1,41 @@
package gov.nasa.pds.validate.ri;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.util.Properties;
-import java.util.Scanner;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import org.w3c.dom.Document;
-import org.w3c.dom.NodeList;
-import org.xml.sax.SAXException;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.lang.NotImplementedException;
+import gov.nasa.pds.registry.common.ConnectionFactory;
+import gov.nasa.pds.registry.common.EstablishConnectionFactory;
public class AuthInformation {
- final public static AuthInformation NO_AUTH = new AuthInformation(false, "", "", "");
- final private boolean trustSelfSigned;
- final private String password;
- final private String url;
- final private String username;
-
- private AuthInformation(boolean tss, String pwd, String un, String url) {
- this.password = pwd;
- this.trustSelfSigned = tss;
- this.url = url;
- this.username = un;
+ final private String apiAuthFile;
+ final private String osAuthFile;
+ final private String regConn;
+ private transient ConnectionFactory factory = null;
+ private AuthInformation(String a, String A, String r) {
+ this.apiAuthFile = A;
+ this.osAuthFile = a;
+ this.regConn = r;
}
-
- public static AuthInformation buildFrom(String filename)
- throws IOException, ParserConfigurationException, SAXException {
- boolean tss;
- File file = new File(filename);
- Scanner textReader;
- String line = null, pwd, un, url;
-
- if (filename == null || filename.length() == 0)
- return NO_AUTH;
- if (!file.exists())
- throw new IOException("Filename '" + filename + "' does not exist");
-
- // Get the first non-comment line
- textReader = new Scanner(file, Charset.defaultCharset().name());
- while (textReader.hasNext() && line == null) {
- line = textReader.nextLine().strip();
- if (line.charAt(0) == '#')
- line = null;
- }
- textReader.close();
-
- // Determine which file processing to use
- if (line.startsWith("<")) { // XML
- //
- DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
- Document document = builder.parse(file);
- NodeList registries = document.getElementsByTagName("registry");
-
- if (registries.getLength() != 1)
- throw new SAXException(
- "There should be one and only registry tag in the harvest config file but found "
- + registries.getLength());
- if (registries.item(0).getAttributes().getNamedItem("auth") == null)
- throw new SAXException("Requires an authorization file or 'auth' attribute on .");
-
- filename = registries.item(0).getAttributes().getNamedItem("auth").getNodeValue();
- url = registries.item(0).getAttributes().getNamedItem("url").getNodeValue();
- } else { // java property
- FileInputStream input = new FileInputStream(file);
- Properties properties = new Properties();
- properties.load(input);
- url = properties.getProperty("url");
- filename = properties.getProperty("credentials");
- input.close();
- }
-
- // Get credentials
- FileInputStream input = new FileInputStream(filename);
- Properties properties = new Properties();
- properties.load(input);
- pwd = properties.getProperty("password");
- tss = Boolean.valueOf(properties.getProperty("trust.self-signed", "false"));
- un = properties.getProperty("user");
- input.close();
- return new AuthInformation(tss, pwd, un, url);
- }
-
- public String getPassword() {
- return password;
- }
-
- public boolean getTrustSelfSigned() {
- return trustSelfSigned;
+ public static AuthInformation buildFrom(CommandLine cl) {
+ return new AuthInformation(
+ cl.getOptionValue("a",""),
+ cl.getOptionValue("A",""),
+ cl.getOptionValue("r",""));
}
-
- public String getUsername() {
- return username;
+ public synchronized ConnectionFactory getConnectionFactory() throws Exception {
+ if (this.factory == null) {
+ if (!this.apiAuthFile.isBlank()) {
+ throw new NotImplementedException();
+ }
+ if (!this.osAuthFile.isBlank()) {
+ this.factory = EstablishConnectionFactory.from(this.regConn, this.osAuthFile);
+ }
+ if (this.factory == null) {
+ throw new IllegalArgumentException("did not supply necessary arguments on the CLI");
+ }
+ }
+ return this.factory;
}
-
- public String getUrl() {
- return url;
+ public String getURL() {
+ return factory != null ? this.factory.toString() : "uninitialized connection factory";
}
}
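
Note on the refactor above: AuthInformation no longer parses an auth/harvest file itself; it wraps registry-common's ConnectionFactory, built from the -a/-A/-r command-line options. A minimal usage sketch, assuming a parsed Apache Commons CLI CommandLine named `cl` and using only the calls that appear in this diff (buildFrom, getConnectionFactory, createRestClient, getURL):

    import org.apache.commons.cli.CommandLine;
    import gov.nasa.pds.registry.common.ConnectionFactory;
    import gov.nasa.pds.registry.common.RestClient;
    import gov.nasa.pds.validate.ri.AuthInformation;

    class AuthInformationSketch {
      static RestClient connect(CommandLine cl) throws Exception {
        AuthInformation auth = AuthInformation.buildFrom(cl);     // reads -a, -A and -r
        ConnectionFactory factory = auth.getConnectionFactory();  // -r + -a resolved via EstablishConnectionFactory
        System.out.println("Connecting to " + auth.getURL());     // getURL() now reports factory.toString()
        return factory.createRestClient();                        // registry-common client used by the new query code
      }
    }
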
diff --git a/src/main/java/gov/nasa/pds/validate/ri/CommandLineInterface.java b/src/main/java/gov/nasa/pds/validate/ri/CommandLineInterface.java
index b52bc5840..32060ba28 100644
--- a/src/main/java/gov/nasa/pds/validate/ri/CommandLineInterface.java
+++ b/src/main/java/gov/nasa/pds/validate/ri/CommandLineInterface.java
@@ -32,10 +32,13 @@ public CommandLineInterface() {
"file with the URL and credential content to have full (all product states) read-only access to the Registry Search API")
.hasArg(true).longOpt("auth-api").numberOfArgs(1).optionalArg(true).build()); */
this.opts.addOption(Option.builder("a").argName("auth-file").desc(
- "file with the URL and credential content to have full, direct read-only access to the Registry OpenSearch DB")
+ "file with the credential content to have full, direct read-only access to the Registry OpenSearch DB")
.hasArg(true).longOpt("auth-opensearch").numberOfArgs(1).optionalArg(true).build());
this.opts.addOption(Option.builder("h").desc("show this text and exit").hasArg(false)
.longOpt("help").optionalArg(true).build());
+ this.opts.addOption(Option.builder("r").argName("registry-connection").desc(
+ "URL point to the registry connection information usually of the form app://connection/direct/localhost.xml")
+ .hasArg(true).longOpt("registry-connection").numberOfArgs(1).optionalArg(true).build());
this.opts.addOption(Option.builder("t").argName("count").desc(
"process the lidvids in parallel (multiple threads) with this argument being the maximum number of threads")
.hasArg(true).longOpt("threads").optionalArg(true).build());
@@ -55,16 +58,11 @@ public void help() {
"Multiple arguments may be given in any order, for example:\n" +
" > validate-refs urn:nasa:pds:foo::1.0 label.xml urn:nasa:pds:bar::2.0 manifest.txt\n\n",
opts,
- "\nAn auth-file is either a text file of the Java property format " +
- "with two variables, 'url' and 'credentials': \n\n" +
- " - The 'url' property is the complete base URL to the Registry OpenSearch endpoint or Search API\n" +
- " * 'https://my-registry.es.amazonaws.com/_search'\n\n" +
- " - The 'credentials' is the path to:\n" +
- " * Harvest config file containing the necessary Registry OpenSearch authorization\n" +
- " \n" +
- " * Java Properties file with a 'user' and 'password' specified, for example: \n" +
- " user=janedoe\n" +
- " password=mypassword\n\n",
+ "\nAn auth-file is a text file of the Java property format " +
+ "with two variables, 'user' and 'password' for example: \n" +
+ " user=janedoe\n" +
+ " password=mypassword\n\n" +
+ "Both -a and -r are required.\n\n",
true);
}
@@ -89,17 +87,16 @@ public int process(String[] args)
loggerConfig.setLevel(Level.INFO);
ctx.updateLoggers();
- if (!cl.hasOption("a")) {
- throw new ParseException("Not yet implemented. Must provide OpenSearch Registry authorization information.");
- } else if (!cl.hasOption("A")) {
- log.warn("Using Registry OpenSearch Database to check references.");
+ if (cl.hasOption("A")) {
+ throw new ParseException("Not yet implemented. Must provide OpenSearch Registry authorization information through -a and -r.");
} else {
- /* not true statement until registry handles authentication
- * throw new ParseException("Must supply authorization file for access to either OpenSearch Database (auth-opensearch) or OpenSearch Registry (auth-api).");
- */
- throw new ParseException("Must define authorization file for access to OpenSearch Database (auth-opensearch).");
+ boolean both = cl.hasOption("a") && cl.hasOption("r");
+ if (!both) {
+ throw new ParseException("Both -a and -r must be given.");
+ } else {
+ log.warn("Using Registry OpenSearch Database to check references.");
+ }
}
-
if (cl.getArgList().size() < 1)
throw new ParseException("Must provide at least one LIDVID, Label file path, or manifest file path as a starting point.");
@@ -115,31 +112,17 @@ public int process(String[] args)
} else
this.log.info("lidvids will be sequentially processed.");
- try {
- DuplicateFileAreaFilenames scanner = new DuplicateFileAreaFilenames(
- AuthInformation.buildFrom(cl.getOptionValue("auth-api", "")),
- AuthInformation.buildFrom(cl.getOptionValue("auth-opensearch", "")));
- Engine engine = new Engine(cylinders, UserInput.toLidvids (cl.getArgList()),
- AuthInformation.buildFrom(cl.getOptionValue("auth-api", "")),
- AuthInformation.buildFrom(cl.getOptionValue("auth-opensearch", "")));
- this.log.info("Starting the duplicate filename in FileArea checks.");
- scanner.findDuplicatesInBackground();
- this.log.info("Starting the reference integrity checks.");
- engine.processQueueUntilEmpty();
- scanner.waitTillDone();
- this.broken = engine.getBroken();
- this.duplicates = scanner.getResults();
- this.total = engine.getTotal();
- } catch (IOException e) {
- this.log.fatal("Cannot process request because of IO problem.", e);
- throw e;
- } catch (ParserConfigurationException e) {
- this.log.fatal("Could not parse the harvest configuration file.", e);
- throw e;
- } catch (SAXException e) {
- this.log.fatal("Mal-formed harvest configuration file.", e);
- throw e;
- }
+ DuplicateFileAreaFilenames scanner = new DuplicateFileAreaFilenames(AuthInformation.buildFrom(cl));
+ Engine engine = new Engine(cylinders, UserInput.toLidvids (cl.getArgList()), AuthInformation.buildFrom(cl));
+ this.log.info("Starting the duplicate filename in FileArea checks.");
+ scanner.findDuplicatesInBackground();
+ this.log.info("Starting the reference integrity checks.");
+ engine.processQueueUntilEmpty();
+ scanner.waitTillDone();
+ this.broken = engine.getBroken();
+ this.duplicates = scanner.getResults();
+ this.total = engine.getTotal();
+
if (-1 < this.total) {
this.log.info("Reference Summary:");
this.log.info(" " + this.total + " products processed");
diff --git a/src/main/java/gov/nasa/pds/validate/ri/CountingAppender.java b/src/main/java/gov/nasa/pds/validate/ri/CountingAppender.java
index 3c33c9e51..00f9ce7ee 100644
--- a/src/main/java/gov/nasa/pds/validate/ri/CountingAppender.java
+++ b/src/main/java/gov/nasa/pds/validate/ri/CountingAppender.java
@@ -1,5 +1,6 @@
package gov.nasa.pds.validate.ri;
+import java.io.Serializable;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.ErrorHandler;
@@ -11,7 +12,7 @@ class CountingAppender implements Appender {
private int err = 0, fatal = 0, warn = 0;
private ErrorHandler handler = null;
private Filter filter = null;
- private Layout layout = null;
+ private Layout<? extends Serializable> layout = null;
private String name = "";
public void addFilter(Filter newFilter) {
@@ -54,7 +55,7 @@ public ErrorHandler getHandler() {
}
@Override
- public Layout getLayout() {
+ public Layout<? extends Serializable> getLayout() {
return this.layout;
}
diff --git a/src/main/java/gov/nasa/pds/validate/ri/Cylinder.java b/src/main/java/gov/nasa/pds/validate/ri/Cylinder.java
index 3066e5f91..9b2ff13f4 100644
--- a/src/main/java/gov/nasa/pds/validate/ri/Cylinder.java
+++ b/src/main/java/gov/nasa/pds/validate/ri/Cylinder.java
@@ -5,7 +5,6 @@
import org.apache.logging.log4j.Logger;
public class Cylinder implements Runnable {
- final private AuthInformation registry;
final private AuthInformation search;
final private CamShaft cam;
final private Logger log = LogManager.getLogger(Cylinder.class);
@@ -13,10 +12,9 @@ public class Cylinder implements Runnable {
final private String lidvid;
private long broken = 0;
- public Cylinder(String lidvid, AuthInformation registry, AuthInformation search, CamShaft cam) {
+ public Cylinder(String lidvid, AuthInformation search, CamShaft cam) {
this.cam = cam;
this.lidvid = lidvid;
- this.registry = registry;
this.search = search;
}
@@ -37,8 +35,7 @@ public long getBroken() {
public void run() {
try {
 ArrayList<String> referenced_valid_lidvids = new ArrayList<String>();
- DocumentInfo search = AuthInformation.NO_AUTH.equals (this.registry) ? new OpensearchDocument(this.search) : new RegistryDocument(this.registry);;
- String magicWord = AuthInformation.NO_AUTH.equals (this.registry) ? "database." : "registry.";
+ DocumentInfo search = new OpensearchDocument(this.search);
if (search.exists(this.lidvid)) {
this.log.info(
@@ -49,11 +46,11 @@ public void run() {
else {
this.broken++;
this.reporter.error("In the search the lidvid '" + this.lidvid + "' references '"
- + reference + "' that is missing in the " + magicWord);
+ + reference + "' that is missing in the database.");
}
}
} else
- this.reporter.error("The given lidvid '" + this.lidvid + "' is missing from the " + magicWord);
+ this.reporter.error("The given lidvid '" + this.lidvid + "' is missing from the database.");
if (this.has_children(search))
this.cam.addAll(referenced_valid_lidvids);
diff --git a/src/main/java/gov/nasa/pds/validate/ri/DuplicateFileAreaFilenames.java b/src/main/java/gov/nasa/pds/validate/ri/DuplicateFileAreaFilenames.java
index 089c95db5..d4227d153 100644
--- a/src/main/java/gov/nasa/pds/validate/ri/DuplicateFileAreaFilenames.java
+++ b/src/main/java/gov/nasa/pds/validate/ri/DuplicateFileAreaFilenames.java
@@ -1,46 +1,27 @@
package gov.nasa.pds.validate.ri;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
+
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import org.apache.http.HttpHost;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import org.opensearch.action.search.SearchRequest;
-import org.opensearch.action.search.SearchResponse;
-import org.opensearch.client.RestClient;
-import org.opensearch.client.RestClientBuilder;
-import org.opensearch.client.RestHighLevelClient;
-import org.opensearch.index.query.QueryBuilders;
-import org.opensearch.search.SearchHit;
-import org.opensearch.search.aggregations.bucket.terms.Terms;
-import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.opensearch.search.builder.SearchSourceBuilder;
+import gov.nasa.pds.registry.common.RestClient;
+
-class DuplicateFileAreaFilenames extends OpensearchDocument implements Runnable, RestClientBuilder.HttpClientConfigCallback, RestClientBuilder.RequestConfigCallback {
- final private AuthInformation registry;
+class DuplicateFileAreaFilenames extends OpensearchDocument implements Runnable {
final private AuthInformation search;
 final private HashMap<String, List<String>> duplicates = new HashMap<String, List<String>>();
final private Logger log = LogManager.getLogger(CommandLineInterface.class);
private boolean done = true;
- public DuplicateFileAreaFilenames(AuthInformation registry, AuthInformation search) {
+ public DuplicateFileAreaFilenames(AuthInformation search) {
super(search);
- this.registry = registry;
this.search = search;
}
- public void findDuplicates() {
- if (AuthInformation.NO_AUTH.equals(this.registry)) {
- viaOpenSearch();
- } else {
- viaRegistry();
- }
- }
public void findDuplicatesInBackground() {
this.done = false;
Thread t = new Thread(this);
@@ -58,70 +39,25 @@ public void run() {
this.notifyAll();
}
}
- private void viaOpenSearch() {
- RestHighLevelClient client = null;
- /*
- * GET registry/_search
- * {
- * "size": 0,
- * "aggregations": {
- * "duplicates": {
- * "terms": {
- * "field": "ops:Data_File_Info/ops:file_ref",
- * "size": 100,
- * "min_doc_count": 2
- * }
- * }
- * }
- * }
- */
- SearchRequest request = new SearchRequest()
- .indices("registry")
- .source(new SearchSourceBuilder()
- .aggregation(new TermsAggregationBuilder("duplicates")
- .field("ops:Data_File_Info/ops:file_ref")
- .minDocCount(2)
- .size(this.PAGE_SIZE)
- )
- .size(0));
- SearchResponse response;
+ public void findDuplicates() {
try {
- URL url = new URL(this.search.getUrl());
- client = new RestHighLevelClient(
- RestClient.builder(new HttpHost(url.getHost(), url.getPort(), url.getProtocol()))
- .setHttpClientConfigCallback(this).setRequestConfigCallback(this));
- response = this.search(client, request);
- if (response.getAggregations() == null || response.getAggregations().get("duplicates") == null)
- return;
- for (Terms.Bucket bucket : ((Terms)response.getAggregations().get("duplicates")).getBuckets()) {
- SearchRequest findIDs = new SearchRequest()
- .indices("registry")
- .source(new SearchSourceBuilder()
- .fetchSource ("lid", null)
- .query(
- QueryBuilders.boolQuery()
- .must(QueryBuilders
- .termQuery("ops:Data_File_Info/ops:file_ref", bucket.getKeyAsString())))
- .size(this.PAGE_SIZE));
- SearchResponse duplicators = this.search(client, findIDs);
+ RestClient client = this.search.getConnectionFactory().createRestClient();
+ for (String file_ref : client.performRequest(client.createSearchRequest()
+ .buildFindDuplicates(PAGE_SIZE)
+ .setIndex(this.search.getConnectionFactory().getIndexName())).bucketValues()) {
 HashSet<String> lids = new HashSet<String>();
- for (SearchHit hit : duplicators.getHits()) {
- lids.add(hit.getSourceAsMap().get("lid").toString());
- }
+ lids.addAll(client.performRequest(client.createSearchRequest()
+ .buildTermQuery("ops:Data_File_Info/ops:file_ref", file_ref)
+ .setReturnedFields(Arrays.asList("lid"))).fields());
if (1 < lids.size()) {
- this.log.error("Found duplicate file: " + bucket.getKeyAsString());
- this.duplicates.put (bucket.getKeyAsString(), new ArrayList<String>(lids));
+ this.log.error("Found duplicate file: " + file_ref);
+ this.duplicates.put (file_ref, new ArrayList<String>(lids));
}
}
- } catch (MalformedURLException e) {
- this.log.error ("Could not form a valid URL from " + this.search.getUrl(), e);
- } catch (IOException e) {
- this.log.error ("Something went wrong talking to opensearch.", e);
+ } catch (Exception e) {
+ this.log.error("Had an error communicating with opensearch", e);
}
}
- private void viaRegistry() {
- this.log.fatal("finding duplicate file area filenames is not implemented.");
- }
public synchronized void waitTillDone() {
while (!this.done) {
try {
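
Note on the query rewrite above: the hand-built SearchRequest/TermsAggregationBuilder pair is replaced by registry-common's fluent search API. A condensed sketch of the two-step lookup that findDuplicates() now performs, using only calls that appear in this diff (createSearchRequest, buildFindDuplicates, setIndex, buildTermQuery, setReturnedFields, performRequest, bucketValues, fields); the `factory` parameter is illustrative and the 5000 page size mirrors PAGE_SIZE in OpensearchDocument:

    import java.util.Arrays;
    import gov.nasa.pds.registry.common.ConnectionFactory;
    import gov.nasa.pds.registry.common.RestClient;

    class DuplicateQuerySketch {
      static void listDuplicates(ConnectionFactory factory) throws Exception {
        RestClient client = factory.createRestClient();
        // Step 1: bucket every ops:Data_File_Info/ops:file_ref that occurs on more than one product.
        for (String fileRef : client.performRequest(client.createSearchRequest()
            .buildFindDuplicates(5000)
            .setIndex(factory.getIndexName())).bucketValues()) {
          // Step 2: fetch the lids of the products sharing that file reference.
          System.out.println(fileRef + " -> " + client.performRequest(client.createSearchRequest()
              .buildTermQuery("ops:Data_File_Info/ops:file_ref", fileRef)
              .setReturnedFields(Arrays.asList("lid"))).fields());
        }
      }
    }
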
diff --git a/src/main/java/gov/nasa/pds/validate/ri/Engine.java b/src/main/java/gov/nasa/pds/validate/ri/Engine.java
index fa13ab98d..aae9fcaaf 100644
--- a/src/main/java/gov/nasa/pds/validate/ri/Engine.java
+++ b/src/main/java/gov/nasa/pds/validate/ri/Engine.java
@@ -10,17 +10,14 @@
public class Engine implements CamShaft {
private final int cylinders;
private long broken, total = 0;
- private final AuthInformation registry;
private final AuthInformation search;
 private final Queue<String> queue = new ArrayDeque<String>();
 private final ArrayList<Thread> workers = new ArrayList<Thread>();
private final Logger log = LogManager.getLogger(Engine.class);
- public Engine(int cylinders, List<String> lidvids, AuthInformation registry,
- AuthInformation search) {
+ public Engine(int cylinders, List<String> lidvids, AuthInformation search) {
this.cylinders = cylinders;
this.queue.addAll(lidvids);
- this.registry = registry;
this.search = search;
this.total = this.queue.size();
}
@@ -64,7 +61,7 @@ public void processQueueUntilEmpty() {
synchronized (this.queue) {
lidvid = this.queue.remove();
}
- applicants.add(new Cylinder(lidvid, this.registry, this.search, this));
+ applicants.add(new Cylinder(lidvid, this.search, this));
}
if (this.cylinders == 1) {
@@ -87,7 +84,7 @@ public void processQueueUntilEmpty() {
this.workers.wait();
} catch (InterruptedException e) {
log.trace(
- "This should never happen and means there is one or more workers stuck in teh abyss",
+ "This should never happen and means there is one or more workers stuck in the abyss",
e);
}
}
diff --git a/src/main/java/gov/nasa/pds/validate/ri/OpensearchDocument.java b/src/main/java/gov/nasa/pds/validate/ri/OpensearchDocument.java
index cd4f5169d..56fc05c8e 100644
--- a/src/main/java/gov/nasa/pds/validate/ri/OpensearchDocument.java
+++ b/src/main/java/gov/nasa/pds/validate/ri/OpensearchDocument.java
@@ -1,42 +1,15 @@
package gov.nasa.pds.validate.ri;
-import java.io.IOException;
-import java.net.ConnectException;
-import java.net.URL;
-import java.security.KeyManagementException;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Objects;
-import org.apache.http.HttpHost;
-import org.apache.http.auth.AuthScope;
-import org.apache.http.auth.UsernamePasswordCredentials;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
-import org.apache.http.impl.client.BasicCredentialsProvider;
-import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
-import org.apache.http.ssl.SSLContextBuilder;
-import org.apache.http.ssl.SSLContexts;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import org.opensearch.action.search.SearchRequest;
-import org.opensearch.action.search.SearchResponse;
-import org.opensearch.client.HttpAsyncResponseConsumerFactory;
-import org.opensearch.client.RequestOptions;
-import org.opensearch.client.RestClient;
-import org.opensearch.client.RestClientBuilder;
-import org.opensearch.client.RestHighLevelClient;
-import org.opensearch.index.query.QueryBuilders;
-import org.opensearch.search.SearchHit;
-import org.opensearch.search.builder.SearchSourceBuilder;
+import gov.nasa.pds.registry.common.RestClient;
-public class OpensearchDocument implements DocumentInfo, RestClientBuilder.HttpClientConfigCallback,
- RestClientBuilder.RequestConfigCallback {
- protected static OpensearchDocument sourceOverride = null;
+public class OpensearchDocument implements DocumentInfo {
protected final int PAGE_SIZE = 5000;
final private AuthInformation context;
 final private HashMap<String, Map<String, Object>> documents =
@@ -44,53 +17,34 @@ public class OpensearchDocument implements DocumentInfo, RestClientBuilder.HttpC
 final private HashMap<String, List<String>> references = new HashMap<String, List<String>>();
final private LidvidComparator lidvid_compare = new LidvidComparator();
final private Logger log = LogManager.getLogger(OpensearchDocument.class);
- final private RequestOptions.Builder hugeMemory = RequestOptions.DEFAULT.toBuilder();
-
+ private RestClient client = null;
private void load(String lidvid) {
if (!this.documents.containsKey(lidvid)) {
try {
- RestHighLevelClient client = null;
- SearchRequest request =
- new SearchRequest()
- .indices(
- "registry")
- .source(
- new SearchSourceBuilder()
- .query(
- QueryBuilders.boolQuery()
- .must(QueryBuilders
- .termQuery(lidvid.contains("::") ? "lidvid" : "lid", lidvid)))
- .size(this.PAGE_SIZE));
- SearchResponse response;
- URL url = new URL(this.context.getUrl());
- try {
- client = new RestHighLevelClient(
- RestClient.builder(new HttpHost(url.getHost(), url.getPort(), url.getProtocol()))
- .setHttpClientConfigCallback(this).setRequestConfigCallback(this));
- response = this.search(client, request);
- if (response != null && response.getHits() != null
- && response.getHits().getTotalHits() != null) {
- if (response.getHits().getTotalHits().value == 1L)
- this.documents.put(lidvid, response.getHits().getAt(0).getSourceAsMap());
- else {
- ArrayList<String> ids = new ArrayList<String>();
- for (SearchHit hit : response.getHits())
- ids.add(hit.getId());
- ids.sort(lidvid_compare);
- for (SearchHit hit : response.getHits()) {
- if (hit.getId().equals(ids.get(ids.size() - 1))) {
- this.documents.put(lidvid, hit.getSourceAsMap());
- this.documents.put(hit.getId(), hit.getSourceAsMap());
- }
- }
+ synchronized (this) {
+ if (this.client == null) this.client = this.context.getConnectionFactory().createRestClient();
+ }
+ List