Skip to content

Commit

Permalink
Merge pull request #3478 from ingef/release
Browse files Browse the repository at this point in the history
Merge Release
  • Loading branch information
awildturtok authored Jul 1, 2024
2 parents 5f34c51 + 1cd9bf1 commit 8fd1c53
Show file tree
Hide file tree
Showing 211 changed files with 3,313 additions and 2,554 deletions.
2 changes: 1 addition & 1 deletion backend/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@
<dependency>
<groupId>io.github.classgraph</groupId>
<artifactId>classgraph</artifactId>
<version>4.8.97</version>
<version>4.8.112</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,14 +28,10 @@
import com.bakdata.conquery.models.datasets.Column;
import com.bakdata.conquery.models.datasets.SecondaryIdDescription;
import com.bakdata.conquery.models.datasets.concepts.Concept;
import com.bakdata.conquery.models.datasets.concepts.ConceptElement;
import com.bakdata.conquery.models.datasets.concepts.Connector;
import com.bakdata.conquery.models.datasets.concepts.ValidityDate;
import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeNode;
import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept;
import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
import com.bakdata.conquery.models.query.DateAggregationMode;
import com.bakdata.conquery.models.query.PrintSettings;
import com.bakdata.conquery.models.query.QueryExecutionContext;
import com.bakdata.conquery.models.query.QueryPlanContext;
import com.bakdata.conquery.models.query.QueryResolveContext;
Expand Down Expand Up @@ -289,7 +285,7 @@ private List<ResultInfo> createResultInfos(Map<SecondaryIdDescription, Integer>
semantics.add(new SemanticType.ConceptColumnT(concept));

if (!isRawConceptValues()) {
resultType = new ResultType.StringT((o, printSettings) -> printValue(concept, o, printSettings));
resultType = new ResultType.StringT(concept::printConceptLocalId);
}
}
else {
Expand All @@ -303,40 +299,6 @@ private List<ResultInfo> createResultInfos(Map<SecondaryIdDescription, Integer>
return List.of(infos);
}

/**
 * Prints a raw result value for the given {@link Concept}.
 * <p>
 * rawValue is expected to be an Integer, expressing a localId for {@link TreeConcept#getElementByLocalId(int)}.
 * <p>
 * If {@link PrintSettings#isPrettyPrint()} is false, the node's {@link ConceptTreeNode#getId()} is used to print.
 * If {@link PrintSettings#isPrettyPrint()} is true, the node's name — suffixed with its description when present — is used to print.
 *
 * @param concept       the concept the value belongs to; non-{@link TreeConcept}s are printed verbatim.
 * @param rawValue      the stored value, expected to be an Integer localId for tree concepts; may be null.
 * @param printSettings controls whether ids or human-readable names are emitted.
 * @return the printed representation, or {@code null} if rawValue was null.
 */
public static String printValue(Concept concept, Object rawValue, PrintSettings printSettings) {

	if (rawValue == null) {
		return null;
	}

	// Non-tree concepts carry no localId mapping; fall back to the raw value's string form.
	if (!(concept instanceof TreeConcept)) {
		return Objects.toString(rawValue);
	}

	final TreeConcept tree = (TreeConcept) concept;

	// NOTE(review): assumes rawValue is an Integer here — a non-Integer would throw ClassCastException.
	final int localId = (int) rawValue;

	final ConceptTreeNode<?> node = tree.getElementByLocalId(localId);

	// Machine-readable output: print the full id.
	if (!printSettings.isPrettyPrint()) {
		return node.getId().toString();
	}

	if (node.getDescription() == null) {
		return node.getName();
	}

	return node.getName() + " - " + node.getDescription();

}

@Override
public void visit(Consumer<Visitable> visitor) {
visitor.accept(this);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import java.io.IOException;
import java.math.BigDecimal;
import java.util.Set;

import javax.annotation.Nonnull;

Expand All @@ -19,7 +20,7 @@
import com.bakdata.conquery.models.query.QueryResolveContext;
import com.bakdata.conquery.models.query.queryplan.filter.FilterNode;
import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext;
import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptConversionTables;
import com.bakdata.conquery.sql.conversion.cqelement.concept.ConnectorSqlTables;
import com.bakdata.conquery.sql.conversion.cqelement.concept.FilterContext;
import com.bakdata.conquery.sql.conversion.model.SqlIdColumns;
import com.bakdata.conquery.sql.conversion.model.filter.SqlFilters;
Expand Down Expand Up @@ -61,15 +62,16 @@ public abstract class FilterValue<VALUE> {
private VALUE value;


public void resolve(QueryResolveContext context) {};
public void resolve(QueryResolveContext context) {
}

/**
 * Builds the query-plan node for this filter value by handing the resolved value to its filter.
 */
public FilterNode<?> createNode() {
	final var resolvedFilter = getFilter();
	return resolvedFilter.createFilterNode(getValue());
}

public SqlFilters convertToSqlFilter(SqlIdColumns ids, ConversionContext context, ConceptConversionTables tables) {
public SqlFilters convertToSqlFilter(SqlIdColumns ids, ConversionContext context, ConnectorSqlTables tables) {
FilterContext<VALUE> filterContext = FilterContext.forConceptConversion(ids, value, context, tables);
SqlFilters sqlFilters = filter.convertToSqlFilter(filterContext);
SqlFilters sqlFilters = filter.createConverter().convertToSqlFilter(filter, filterContext);
if (context.isNegation()) {
return new SqlFilters(sqlFilters.getSelects(), sqlFilters.getWhereClauses().negated());
}
Expand All @@ -78,14 +80,14 @@ public SqlFilters convertToSqlFilter(SqlIdColumns ids, ConversionContext context

public Condition convertForTableExport(SqlIdColumns ids, ConversionContext context) {
FilterContext<VALUE> filterContext = FilterContext.forTableExport(ids, value, context);
return filter.convertForTableExport(filterContext);
return filter.createConverter().convertForTableExport(filter, filterContext);
}

@NoArgsConstructor
@CPSType(id = FrontendFilterType.Fields.MULTI_SELECT, base = FilterValue.class)
@ToString(callSuper = true)
public static class CQMultiSelectFilter extends FilterValue<String[]> {
public CQMultiSelectFilter(@NsIdRef Filter<String[]> filter, String[] value) {
public static class CQMultiSelectFilter extends FilterValue<Set<String>> {
public CQMultiSelectFilter(@NsIdRef Filter<Set<String>> filter, Set<String> value) {
super(filter, value);
}

Expand All @@ -94,8 +96,8 @@ public CQMultiSelectFilter(@NsIdRef Filter<String[]> filter, String[] value) {
@NoArgsConstructor
@CPSType(id = FrontendFilterType.Fields.BIG_MULTI_SELECT, base = FilterValue.class)
@ToString(callSuper = true)
public static class CQBigMultiSelectFilter extends FilterValue<String[]> {
public CQBigMultiSelectFilter(@NsIdRef Filter<String[]> filter, String[] value) {
public static class CQBigMultiSelectFilter extends FilterValue<Set<String>> {
public CQBigMultiSelectFilter(@NsIdRef Filter<Set<String>> filter, Set<String> value) {
super(filter, value);
}

Expand Down Expand Up @@ -217,7 +219,11 @@ public GroupFilterValue deserialize(JsonParser p, DeserializationContext ctxt) t
final Filter<?> filter = nsIdDeserializer.deserialize(filterTraverse, ctxt);

if (!(filter instanceof GroupFilter)) {
throw InvalidTypeIdException.from(filterNode.traverse(), GroupFilter.class, String.format("Expected filter of type %s but was: %s", GroupFilter.class, filter != null ? filter.getClass() : null));
throw InvalidTypeIdException.from(filterNode.traverse(), GroupFilter.class, String.format("Expected filter of type %s but was: %s", GroupFilter.class,
filter != null
? filter.getClass()
: null
));
}
GroupFilter groupFilter = (GroupFilter) filter;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,7 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.*;

import com.bakdata.conquery.Conquery;
import com.bakdata.conquery.mode.cluster.ClusterManager;
Expand Down Expand Up @@ -84,7 +80,7 @@ public void startStandalone(Environment environment, Namespace namespace, Conque
.setNameFormat("ShardNode Storage Loader %d")
.setUncaughtExceptionHandler((t, e) -> {
ConqueryMDC.setLocation(t.getName());
log.error(t.getName() + " failed to init storage of ShardNode", e);
log.error("{} failed to init storage of ShardNode", t.getName(), e);
})
.build()
);
Expand Down Expand Up @@ -121,6 +117,7 @@ public void startStandalone(Environment environment, Namespace namespace, Conque
for (Future<ShardNode> f : tasks) {
try {
f.get();

}
catch (ExecutionException e) {
log.error("during ShardNodes creation", e);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
package com.bakdata.conquery.commands;

import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import jakarta.validation.Validator;

import com.bakdata.conquery.io.cps.CPSTypeIdResolver;
import com.bakdata.conquery.io.jackson.MutableInjectableValues;
Expand All @@ -31,24 +31,18 @@
import com.bakdata.conquery.tasks.PermissionCleanupTask;
import com.bakdata.conquery.tasks.QueryCleanupTask;
import com.bakdata.conquery.tasks.ReloadMetaStorageTask;
import com.bakdata.conquery.util.io.ConqueryMDC;
import com.fasterxml.jackson.databind.DeserializationConfig;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationConfig;
import com.google.common.base.Throwables;
import io.dropwizard.core.setup.Environment;
import io.dropwizard.jersey.DropwizardResourceConfig;
import io.dropwizard.lifecycle.Managed;
import jakarta.validation.Validator;
import lombok.Getter;
import lombok.NonNull;
import lombok.SneakyThrows;
import lombok.experimental.Delegate;
import lombok.extern.slf4j.Slf4j;
import org.apache.mina.core.service.IoHandlerAdapter;
import org.apache.mina.core.session.IdleStatus;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.filter.FilterEvent;
import org.glassfish.jersey.internal.inject.AbstractBinder;

/**
Expand All @@ -58,7 +52,7 @@
*/
@Slf4j
@Getter
public class ManagerNode extends IoHandlerAdapter implements Managed {
public class ManagerNode implements Managed {

public static final String DEFAULT_NAME = "manager";

Expand Down Expand Up @@ -281,45 +275,4 @@ public void stop() throws Exception {
}

}

/**
 * Tags the logging MDC with the session's local endpoint so subsequent log lines carry it.
 */
private void setLocation(IoSession session) {
	ConqueryMDC.setLocation(session.getLocalAddress().toString());
}

@Override
public void sessionClosed(IoSession session) {
	// Tag the MDC with the local endpoint before logging the disconnect.
	ConqueryMDC.setLocation(session.getLocalAddress().toString());
	log.info("Disconnected.");
}

@Override
public void sessionCreated(IoSession session) {
	// Tag the MDC with the local endpoint before logging session creation.
	ConqueryMDC.setLocation(session.getLocalAddress().toString());
	log.debug("Session created.");
}

@Override
public void sessionIdle(IoSession session, IdleStatus status) {
	setLocation(session);
	// Convert the session's epoch-millis timestamps for readable log output.
	final Instant lastRead = Instant.ofEpochMilli(session.getLastReadTime());
	final Instant lastWrite = Instant.ofEpochMilli(session.getLastWriteTime());
	log.warn("Session idle {}. Last read: {}. Last write: {}.", status, lastRead, lastWrite);
}

@Override
public void messageSent(IoSession session, Object message) {
	// Tag the MDC with the local endpoint before tracing the sent message.
	ConqueryMDC.setLocation(session.getLocalAddress().toString());
	log.trace("Message sent: {}", message);
}

@Override
public void inputClosed(IoSession session) {
	// Tag the MDC with the local endpoint before logging the closed input.
	ConqueryMDC.setLocation(session.getLocalAddress().toString());
	log.info("Session closed.");
}

@Override
public void event(IoSession session, FilterEvent event) throws Exception {
	// Tag the MDC with the local endpoint before tracing the filter event.
	ConqueryMDC.setLocation(session.getLocalAddress().toString());
	log.trace("Event handled: {}", event);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,9 @@ public ExternalFormBackendApi(Client client, URI baseURI, String formConfigPath,
this.tokenCreator = tokenCreator;
this.conqueryApiUrl = conqueryApiUrl;

client.register(authFilterProvider.getFilter());
if (authFilterProvider != null) {
client.register(authFilterProvider.getFilter());
}

baseTarget = this.client.target(baseURI);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@
import com.bakdata.conquery.models.index.IndexService;
import com.bakdata.conquery.models.jobs.JobManager;
import com.bakdata.conquery.models.worker.DatasetRegistry;
import com.bakdata.conquery.models.worker.DistributedNamespace;
import com.bakdata.conquery.models.worker.LocalNamespace;
import com.bakdata.conquery.models.worker.Namespace;
import io.dropwizard.core.setup.Environment;

Expand All @@ -29,26 +27,10 @@ static InternalObjectMapperCreator newInternalObjectMapperCreator(ConqueryConfig
return new InternalObjectMapperCreator(config, validator);
}

static DatasetRegistry<DistributedNamespace> createDistributedDatasetRegistry(
NamespaceHandler<DistributedNamespace> namespaceHandler,
ConqueryConfig config,
InternalObjectMapperCreator creator
) {
return createDatasetRegistry(namespaceHandler, creator, config);
}

static DatasetRegistry<LocalNamespace> createLocalDatasetRegistry(
NamespaceHandler<LocalNamespace> namespaceHandler,
static <N extends Namespace> DatasetRegistry<N> createDatasetRegistry(
NamespaceHandler<N> namespaceHandler,
ConqueryConfig config,
InternalObjectMapperCreator creator
) {
return createDatasetRegistry(namespaceHandler, creator, config);
}

private static <N extends Namespace> DatasetRegistry<N> createDatasetRegistry(
NamespaceHandler<N> namespaceHandler,
InternalObjectMapperCreator creator,
ConqueryConfig config
) {
final IndexService indexService = new IndexService(config.getCsv().createCsvParserSettings(), config.getIndex().getEmptyLabel());
DatasetRegistry<N> datasetRegistry = new DatasetRegistry<>(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,28 +2,22 @@

import java.io.IOException;
import java.net.InetSocketAddress;
import jakarta.validation.Validator;

import com.bakdata.conquery.io.jackson.View;
import com.bakdata.conquery.io.mina.BinaryJacksonCoder;
import com.bakdata.conquery.io.mina.CQProtocolCodecFilter;
import com.bakdata.conquery.io.mina.ChunkReader;
import com.bakdata.conquery.io.mina.ChunkWriter;
import com.bakdata.conquery.io.mina.MinaAttributes;
import com.bakdata.conquery.io.mina.NetworkSession;
import com.bakdata.conquery.io.mina.*;
import com.bakdata.conquery.mode.InternalObjectMapperCreator;
import com.bakdata.conquery.models.config.ConqueryConfig;
import com.bakdata.conquery.models.jobs.Job;
import com.bakdata.conquery.models.jobs.JobManager;
import com.bakdata.conquery.models.jobs.ReactingJob;
import com.bakdata.conquery.models.messages.SlowMessage;
import com.bakdata.conquery.models.messages.namespaces.specific.ShutdownShard;
import com.bakdata.conquery.models.messages.network.MessageToManagerNode;
import com.bakdata.conquery.models.messages.network.NetworkMessageContext;
import com.bakdata.conquery.models.worker.DatasetRegistry;
import com.bakdata.conquery.models.worker.DistributedNamespace;
import com.bakdata.conquery.util.io.ConqueryMDC;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.validation.Validator;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
Expand Down Expand Up @@ -108,8 +102,6 @@ public void start() throws IOException {
}

public void stop() {
clusterState.getShardNodes().forEach(((socketAddress, shardNodeInformation) -> shardNodeInformation.send(new ShutdownShard())));

try {
acceptor.dispose();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,7 @@
import java.util.List;
import java.util.function.Supplier;

import com.bakdata.conquery.mode.DelegateManager;
import com.bakdata.conquery.mode.ImportHandler;
import com.bakdata.conquery.mode.InternalObjectMapperCreator;
import com.bakdata.conquery.mode.ManagerProvider;
import com.bakdata.conquery.mode.NamespaceHandler;
import com.bakdata.conquery.mode.StorageListener;
import com.bakdata.conquery.mode.*;
import com.bakdata.conquery.models.config.ConqueryConfig;
import com.bakdata.conquery.models.jobs.JobManager;
import com.bakdata.conquery.models.worker.ClusterHealthCheck;
Expand All @@ -27,7 +22,7 @@ public ClusterManager provideManager(ConqueryConfig config, Environment environm
final InternalObjectMapperCreator creator = ManagerProvider.newInternalObjectMapperCreator(config, environment.getValidator());
final ClusterState clusterState = new ClusterState();
final NamespaceHandler<DistributedNamespace> namespaceHandler = new ClusterNamespaceHandler(clusterState, config, creator);
final DatasetRegistry<DistributedNamespace> datasetRegistry = ManagerProvider.createDistributedDatasetRegistry(namespaceHandler, config, creator);
final DatasetRegistry<DistributedNamespace> datasetRegistry = ManagerProvider.createDatasetRegistry(namespaceHandler, config, creator);
creator.init(datasetRegistry);

final ClusterConnectionManager connectionManager =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ public DelegateManager<LocalNamespace> provideManager(ConqueryConfig config, Env

InternalObjectMapperCreator creator = ManagerProvider.newInternalObjectMapperCreator(config, environment.getValidator());
NamespaceHandler<LocalNamespace> namespaceHandler = new LocalNamespaceHandler(config, creator, dialectFactory);
DatasetRegistry<LocalNamespace> datasetRegistry = ManagerProvider.createLocalDatasetRegistry(namespaceHandler, config, creator);
DatasetRegistry<LocalNamespace> datasetRegistry = ManagerProvider.createDatasetRegistry(namespaceHandler, config, creator);
creator.init(datasetRegistry);

return new DelegateManager<>(
Expand Down
Loading

0 comments on commit 8fd1c53

Please sign in to comment.