From c017233fed35d09b515cad236963e2529382c558 Mon Sep 17 00:00:00 2001
From: Dain Sundstrom
Date: Thu, 7 Nov 2024 12:48:03 -0800
Subject: [PATCH] Fix IntelliJ warnings in IcebergPageSourceProvider

---
 .../iceberg/IcebergPageSourceProvider.java | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSourceProvider.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSourceProvider.java
index ed1f98821ea8c..9f57a66719ddc 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSourceProvider.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSourceProvider.java
@@ -436,7 +436,7 @@ private TupleDomain<IcebergColumnHandle> prunePredicate(
         return unenforcedPredicate
                 // Filter out partition columns domains from the dynamic filter because they should be irrelevant at data file level
-                .filter((columnHandle, domain) -> !partitionKeys.containsKey(columnHandle.getId()))
+                .filter((columnHandle, _) -> !partitionKeys.containsKey(columnHandle.getId()))
                 // remove domains from predicate that fully contain split data because they are irrelevant for filtering
                 .filter((handle, domain) -> !domain.contains(fileStatisticsDomain.getDomain(handle, domain.getType())));
     }
@@ -481,7 +481,7 @@ private ConnectorPageSource openDeletes(
                 .get();
     }
 
-    public ReaderPageSourceWithRowPositions createDataPageSource(
+    private ReaderPageSourceWithRowPositions createDataPageSource(
             ConnectorSession session,
             TrinoInputFile inputFile,
             long start,
@@ -880,7 +880,7 @@ private static ReaderPageSourceWithRowPositions createParquetPageSource(
 
             List<org.apache.parquet.schema.Type> parquetFields = readBaseColumns.stream()
                     .map(column -> parquetIdToField.get(column.getId()))
-                    .collect(toList());
+                    .toList();
 
             MessageType requestedSchema = getMessageType(regularColumns, fileSchema.getName(), parquetIdToField);
             Map<List<String>, ColumnDescriptor> descriptorsByPath = getDescriptors(fileSchema, requestedSchema);
@@ -1025,7 +1025,7 @@ else if (type instanceof GroupType groupType) {
     private static MessageType getMessageType(List<IcebergColumnHandle> regularColumns, String fileSchemaName, Map<Integer, org.apache.parquet.schema.Type> parquetIdToField)
     {
         return projectSufficientColumns(regularColumns)
-                .map(readerColumns -> readerColumns.get().stream().map(IcebergColumnHandle.class::cast).collect(toUnmodifiableList()))
+                .map(readerColumns -> readerColumns.get().stream().map(IcebergColumnHandle.class::cast).toList())
                 .orElse(regularColumns)
                 .stream()
                 .map(column -> getColumnType(column, parquetIdToField))
@@ -1205,9 +1205,7 @@ public static ProjectedLayout createProjectedLayout(OrcColumn root, List<List<Integer>> fieldIdDereferences)
 
         Map<Integer, List<List<Integer>>> dereferencesByField = fieldIdDereferences.stream()
-                .collect(groupingBy(
-                        sequence -> sequence.get(0),
-                        mapping(sequence -> sequence.subList(1, sequence.size()), toUnmodifiableList())));
+                .collect(groupingBy(List::getFirst, mapping(sequence -> sequence.subList(1, sequence.size()), toUnmodifiableList())));
 
         ImmutableMap.Builder<Integer, ProjectedLayout> fieldLayouts = ImmutableMap.builder();
         for (OrcColumn nestedColumn : root.getNestedColumns()) {
@@ -1231,7 +1229,7 @@ public ProjectedLayout getFieldLayout(OrcColumn orcColumn)
     /**
      * Creates a mapping between the input {@code columns} and base columns if required.
     */
-    public static Optional<ReaderColumns> projectBaseColumns(List<IcebergColumnHandle> columns)
+    private static Optional<ReaderColumns> projectBaseColumns(List<IcebergColumnHandle> columns)
     {
         requireNonNull(columns, "columns is null");
 
@@ -1292,7 +1290,7 @@ private static TupleDomain<ColumnDescriptor> getParquetTupleDomain(Map<List<String>, Col
         effectivePredicate.getDomains().orElseThrow().forEach((columnHandle, domain) -> {
             ColumnIdentity columnIdentity = columnHandle.getColumnIdentity();
             // skip looking up predicates for complex types as Parquet only stores stats for primitives
-            if (PRIMITIVE.equals(columnIdentity.getTypeCategory())) {
+            if (PRIMITIVE == columnIdentity.getTypeCategory()) {
                 ColumnDescriptor descriptor = descriptorsById.get(columnHandle.getId());
                 if (descriptor != null) {
                     predicate.put(descriptor, domain);
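
The changes above swap IntelliJ-flagged patterns for newer Java idioms: an unnamed lambda parameter (_) where the value is unused, Stream.toList() in place of collect(toList()), List::getFirst in place of sequence.get(0), == instead of equals() for an enum constant, and reduced visibility on methods used only inside the class. The following is a minimal standalone sketch of those idioms, assuming a JDK where unnamed variables are final (Java 22 or newer); the class name, the TypeCategory enum, and the sample data are invented for illustration and are not taken from IcebergPageSourceProvider.

    import static java.util.stream.Collectors.groupingBy;
    import static java.util.stream.Collectors.mapping;
    import static java.util.stream.Collectors.toUnmodifiableList;

    import java.util.List;
    import java.util.Map;

    public class WarningFixDemo
    {
        // Invented stand-in for an enum like ColumnIdentity.TypeCategory
        enum TypeCategory { PRIMITIVE, STRUCT }

        public static void main(String[] args)
        {
            List<List<Integer>> sequences = List.of(List.of(1, 2, 3), List.of(1, 9), List.of(4, 5));

            // Unnamed variable "_" (Java 22+) for a lambda parameter the body does not use
            Map<Integer, Boolean> flags = Map.of(1, true, 4, false);
            flags.forEach((key, _) -> System.out.println("key: " + key));

            // Stream.toList() instead of collect(toList());
            // List::getFirst (Java 21+) instead of list.get(0)
            List<Integer> heads = sequences.stream()
                    .map(List::getFirst)
                    .toList();
            System.out.println(heads);

            // Group by the first element, mapping each sequence to its remaining suffix
            Map<Integer, List<List<Integer>>> suffixesByHead = sequences.stream()
                    .collect(groupingBy(List::getFirst,
                            mapping(sequence -> sequence.subList(1, sequence.size()), toUnmodifiableList())));
            System.out.println(suffixesByHead);

            // Enum constants are singletons, so == is the idiomatic, null-safe comparison
            TypeCategory category = TypeCategory.PRIMITIVE;
            System.out.println(category == TypeCategory.PRIMITIVE);
        }
    }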