Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Revert commits back to after 2.11 release #2427

Merged
merged 24 commits on Nov 13, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit. Hold shift + click to select a range.
7e2d764
Revert "Add more metrics and handle emr exception message (#2422) (#2…
vmmusings Nov 13, 2023
9318c6c
Revert "Block settings in sql query settings API and add more unit te…
vmmusings Nov 13, 2023
aba6b88
Revert "Added session, statement, emrjob metrics to sql stats api (#2…
vmmusings Nov 13, 2023
728eed2
Revert "Redefine Drop Index as logical delete (#2386) (#2397)"
vmmusings Nov 13, 2023
7675008
Revert "add concurrent limit on datasource and sessions (#2390) (#2395)"
vmmusings Nov 13, 2023
6f7dd96
Revert "Add Flint Index Purging Logic (#2372) (#2389)"
vmmusings Nov 13, 2023
02abfa7
Revert "Refactoring for tags usage in test files and also added expli…
vmmusings Nov 13, 2023
17a1b0f
Revert "Enable session by default (#2373) (#2375)"
vmmusings Nov 13, 2023
13da524
Revert "Create new session if client provided session is invalid (#23…
vmmusings Nov 13, 2023
d049134
Revert "Add where clause support in create statement (#2366) (#2370)"
vmmusings Nov 13, 2023
719934e
Revert "create new session if current session not ready (#2363) (#2365)"
vmmusings Nov 13, 2023
2fad890
Revert "Handle Describe,Refresh and Show Queries Properly (#2357) (#2…
vmmusings Nov 13, 2023
d420e73
Revert "Add Session limitation (#2354) (#2359)"
vmmusings Nov 13, 2023
4a1566d
Revert "Bug Fix, support cancel query in running state (#2351) (#2353)"
vmmusings Nov 13, 2023
11d351f
Revert "Fix bug, using basic instead of basicauth (#2342) (#2355)"
vmmusings Nov 13, 2023
268b08d
Revert "Add missing tags and MV support (#2336) (#2346)"
vmmusings Nov 13, 2023
a20d458
Revert "[Backport 2.x] deprecated job-metadata-index (#2340) (#2343)"
vmmusings Nov 13, 2023
a0114f4
Revert "Integration with REPL Spark job (#2327) (#2338)"
vmmusings Nov 13, 2023
ab12221
Revert "Implement patch API for datasources (#2273) (#2329)"
vmmusings Nov 13, 2023
b9e8910
Revert "Add sessionId parameters for create async query API (#2312) (…
vmmusings Nov 13, 2023
77af17c
Revert "Add Statement (#2294) (#2318) (#2319)"
vmmusings Nov 13, 2023
3ca6a9a
Revert "Upgrade json (#2307) (#2314)"
vmmusings Nov 13, 2023
da7f82e
Revert "Minor Refactoring (#2308) (#2317)"
vmmusings Nov 13, 2023
e6eac9b
Revert "add InteractiveSession and SessionManager (#2290) (#2293) (#2…
vmmusings Nov 13, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions common/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ repositories {
dependencies {
api "org.antlr:antlr4-runtime:4.7.1"
api group: 'com.google.guava', name: 'guava', version: '32.0.1-jre'
api group: 'org.apache.logging.log4j', name: 'log4j-core', version:"${versions.log4j}"
api group: 'org.apache.logging.log4j', name: 'log4j-core', version:'2.20.0'
api group: 'org.apache.commons', name: 'commons-lang3', version: '3.12.0'
api group: 'com.squareup.okhttp3', name: 'okhttp', version: '4.9.3'
implementation 'com.github.babbel:okhttp-aws-signer:1.0.2'
Expand Down Expand Up @@ -63,4 +63,4 @@ configurations.all {
resolutionStrategy.force "org.apache.httpcomponents:httpcore:4.4.13"
resolutionStrategy.force "joda-time:joda-time:2.10.12"
resolutionStrategy.force "org.slf4j:slf4j-api:1.7.36"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -32,18 +32,11 @@ public enum Key {
QUERY_SIZE_LIMIT("plugins.query.size_limit"),
ENCYRPTION_MASTER_KEY("plugins.query.datasources.encryption.masterkey"),
DATASOURCES_URI_HOSTS_DENY_LIST("plugins.query.datasources.uri.hosts.denylist"),
DATASOURCES_LIMIT("plugins.query.datasources.limit"),

METRICS_ROLLING_WINDOW("plugins.query.metrics.rolling_window"),
METRICS_ROLLING_INTERVAL("plugins.query.metrics.rolling_interval"),
SPARK_EXECUTION_ENGINE_CONFIG("plugins.query.executionengine.spark.config"),
CLUSTER_NAME("cluster.name"),
SPARK_EXECUTION_SESSION_LIMIT("plugins.query.executionengine.spark.session.limit"),
SPARK_EXECUTION_REFRESH_JOB_LIMIT("plugins.query.executionengine.spark.refresh_job.limit"),
SESSION_INDEX_TTL("plugins.query.executionengine.spark.session.index.ttl"),
RESULT_INDEX_TTL("plugins.query.executionengine.spark.result.index.ttl"),
AUTO_INDEX_MANAGEMENT_ENABLED(
"plugins.query.executionengine.spark.auto_index_management.enabled");
CLUSTER_NAME("cluster.name");

@Getter private final String keyValue;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@

package org.opensearch.sql.datasource;

import java.util.Map;
import java.util.Set;
import org.opensearch.sql.datasource.model.DataSource;
import org.opensearch.sql.datasource.model.DataSourceMetadata;
Expand Down Expand Up @@ -57,19 +56,12 @@ public interface DataSourceService {
void createDataSource(DataSourceMetadata metadata);

/**
* Updates {@link DataSource} corresponding to dataSourceMetadata (all fields needed).
* Updates {@link DataSource} corresponding to dataSourceMetadata.
*
* @param dataSourceMetadata {@link DataSourceMetadata}.
*/
void updateDataSource(DataSourceMetadata dataSourceMetadata);

/**
* Patches {@link DataSource} corresponding to the given name (only fields to be changed needed).
*
* @param dataSourceData
*/
void patchDataSource(Map<String, Object> dataSourceData);

/**
* Deletes {@link DataSource} corresponding to the DataSource name.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
Expand All @@ -25,24 +25,11 @@

@Getter
@Setter
@AllArgsConstructor
@EqualsAndHashCode
@JsonIgnoreProperties(ignoreUnknown = true)
public class DataSourceMetadata {

public static final String DEFAULT_RESULT_INDEX = "query_execution_result";
public static final int MAX_RESULT_INDEX_NAME_SIZE = 255;
// OS doesn’t allow uppercase: https://tinyurl.com/yse2xdbx
public static final String RESULT_INDEX_NAME_PATTERN = "[a-z0-9_-]+";
public static String INVALID_RESULT_INDEX_NAME_SIZE =
"Result index name size must contains less than "
+ MAX_RESULT_INDEX_NAME_SIZE
+ " characters";
public static String INVALID_CHAR_IN_RESULT_INDEX_NAME =
"Result index name has invalid character. Valid characters are a-z, 0-9, -(hyphen) and"
+ " _(underscore)";
public static String INVALID_RESULT_INDEX_PREFIX =
"Result index must start with " + DEFAULT_RESULT_INDEX;

@JsonProperty private String name;

@JsonProperty private String description;
Expand All @@ -57,31 +44,18 @@ public class DataSourceMetadata {

@JsonProperty private String resultIndex;

public static Function<String, String> DATASOURCE_TO_RESULT_INDEX =
datasourceName -> String.format("%s_%s", DEFAULT_RESULT_INDEX, datasourceName);

public DataSourceMetadata(
String name,
String description,
DataSourceType connector,
List<String> allowedRoles,
Map<String, String> properties,
String resultIndex) {
this.name = name;
String errorMessage = validateCustomResultIndex(resultIndex);
if (errorMessage != null) {
throw new IllegalArgumentException(errorMessage);
}
if (resultIndex == null) {
this.resultIndex = fromNameToCustomResultIndex();
} else {
this.resultIndex = resultIndex;
}

this.connector = connector;
this.description = description;
this.description = StringUtils.EMPTY;
this.properties = properties;
this.allowedRoles = allowedRoles;
this.resultIndex = resultIndex;
}

public DataSourceMetadata() {
Expand All @@ -97,56 +71,9 @@ public DataSourceMetadata() {
public static DataSourceMetadata defaultOpenSearchDataSourceMetadata() {
return new DataSourceMetadata(
DEFAULT_DATASOURCE_NAME,
StringUtils.EMPTY,
DataSourceType.OPENSEARCH,
Collections.emptyList(),
ImmutableMap.of(),
null);
}

public String validateCustomResultIndex(String resultIndex) {
if (resultIndex == null) {
return null;
}
if (resultIndex.length() > MAX_RESULT_INDEX_NAME_SIZE) {
return INVALID_RESULT_INDEX_NAME_SIZE;
}
if (!resultIndex.matches(RESULT_INDEX_NAME_PATTERN)) {
return INVALID_CHAR_IN_RESULT_INDEX_NAME;
}
if (resultIndex != null && !resultIndex.startsWith(DEFAULT_RESULT_INDEX)) {
return INVALID_RESULT_INDEX_PREFIX;
}
return null;
}

/**
* Since we are using datasource name to create result index, we need to make sure that the final
* name is valid
*
* @param resultIndex result index name
* @return valid result index name
*/
private String convertToValidResultIndex(String resultIndex) {
// Limit Length
if (resultIndex.length() > MAX_RESULT_INDEX_NAME_SIZE) {
resultIndex = resultIndex.substring(0, MAX_RESULT_INDEX_NAME_SIZE);
}

// Pattern Matching: Remove characters that don't match the pattern
StringBuilder validChars = new StringBuilder();
for (char c : resultIndex.toCharArray()) {
if (String.valueOf(c).matches(RESULT_INDEX_NAME_PATTERN)) {
validChars.append(c);
}
}
return validChars.toString();
}

public String fromNameToCustomResultIndex() {
if (name == null) {
throw new IllegalArgumentException("Datasource name cannot be null");
}
return convertToValidResultIndex(DATASOURCE_TO_RESULT_INDEX.apply(name.toLowerCase()));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.opensearch.sql.DataSourceSchemaName;
import org.opensearch.sql.analysis.symbol.Namespace;
Expand Down Expand Up @@ -198,7 +197,6 @@ public Set<DataSourceMetadata> getDataSourceMetadata(boolean isDefaultDataSource
ds ->
new DataSourceMetadata(
ds.getName(),
StringUtils.EMPTY,
ds.getConnectorType(),
Collections.emptyList(),
ImmutableMap.of(),
Expand Down Expand Up @@ -233,9 +231,6 @@ public DataSource getDataSource(String dataSourceName) {
@Override
public void updateDataSource(DataSourceMetadata dataSourceMetadata) {}

@Override
public void patchDataSource(Map<String, Object> dataSourceData) {}

@Override
public void deleteDataSource(String dataSourceName) {}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
import java.util.LinkedHashMap;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
Expand Down Expand Up @@ -63,7 +62,6 @@ void testIterator() {
dataSource ->
new DataSourceMetadata(
dataSource.getName(),
StringUtils.EMPTY,
dataSource.getConnectorType(),
Collections.emptyList(),
ImmutableMap.of(),
Expand Down
4 changes: 1 addition & 3 deletions datasources/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@ repositories {
dependencies {
implementation project(':core')
implementation project(':protocol')
implementation project(':opensearch')
implementation project(':legacy')
implementation group: 'org.opensearch', name: 'opensearch', version: "${opensearch_version}"
implementation group: 'org.opensearch', name: 'opensearch-x-content', version: "${opensearch_version}"
implementation group: 'org.opensearch', name: 'common-utils', version: "${opensearch_build}"
Expand All @@ -37,7 +35,7 @@ dependencies {
test {
useJUnitPlatform()
testLogging {
events "skipped", "failed"
events "passed", "skipped", "failed"
exceptionFormat "full"
}
}
Expand Down

This file was deleted.

This file was deleted.

Loading
Loading