Skip to content

Commit

Permalink
[flink] Adopt open(OpenContext) in RichFunction (#4581)
Browse files Browse the repository at this point in the history
  • Loading branch information
yunfengzhou-hub authored Nov 25, 2024
1 parent d9a1b80 commit 76a4134
Show file tree
Hide file tree
Showing 20 changed files with 319 additions and 24 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.api.common.functions;

import org.apache.flink.annotation.PublicEvolving;

/**
 * The {@link OpenContext} interface provides necessary information required by the {@link
 * RichFunction} when it is opened. The {@link OpenContext} is currently empty: it serves as an
 * extension point so that accessor methods can be added later without changing the signature of
 * {@code RichFunction#open}.
 */
@PublicEvolving
public interface OpenContext {}
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.api.common.functions;

import org.apache.flink.annotation.PublicEvolving;

/**
 * The {@link OpenContext} interface provides necessary information required by the {@link
 * RichFunction} when it is opened. The {@link OpenContext} is currently empty: it serves as an
 * extension point so that accessor methods can be added later without changing the signature of
 * {@code RichFunction#open}.
 */
@PublicEvolving
public interface OpenContext {}
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.api.common.functions;

import org.apache.flink.annotation.PublicEvolving;

/**
 * The {@link OpenContext} interface provides necessary information required by the {@link
 * RichFunction} when it is opened. The {@link OpenContext} is currently empty: it serves as an
 * extension point so that accessor methods can be added later without changing the signature of
 * {@code RichFunction#open}.
 */
@PublicEvolving
public interface OpenContext {}
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.api.common.functions;

import org.apache.flink.annotation.PublicEvolving;

/**
 * The {@link OpenContext} interface provides necessary information required by the {@link
 * RichFunction} when it is opened. The {@link OpenContext} is currently empty: it serves as an
 * extension point so that accessor methods can be added later without changing the signature of
 * {@code RichFunction#open}.
 */
@PublicEvolving
public interface OpenContext {}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import org.apache.paimon.flink.sink.LogSinkFunction;
import org.apache.paimon.table.sink.SinkRecord;

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaException;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
Expand Down Expand Up @@ -65,7 +66,16 @@ public void setWriteCallback(WriteCallback writeCallback) {
this.writeCallback = writeCallback;
}

@Override
/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 1.18-.
 */
public void open(OpenContext openContext) throws Exception {
open(new Configuration());
}

/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 2.0+.
 */
public void open(Configuration configuration) throws Exception {
super.open(configuration);
Callback baseCallback = requireNonNull(callback);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.types.DataField;

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
Expand Down Expand Up @@ -74,7 +75,16 @@ public CdcDynamicTableParsingProcessFunction(
this.parserFactory = parserFactory;
}

@Override
/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 1.18-.
 */
public void open(OpenContext openContext) throws Exception {
open(new Configuration());
}

/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 2.0+.
 */
public void open(Configuration parameters) throws Exception {
parser = parserFactory.create();
catalog = catalogLoader.load();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import org.apache.paimon.types.DataField;

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.ListTypeInfo;
import org.apache.flink.configuration.Configuration;
Expand Down Expand Up @@ -51,7 +52,16 @@ public CdcMultiTableParsingProcessFunction(EventParser.Factory<T> parserFactory)
this.parserFactory = parserFactory;
}

@Override
/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 1.18-.
 */
public void open(OpenContext openContext) throws Exception {
open(new Configuration());
}

/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 2.0+.
 */
public void open(Configuration parameters) throws Exception {
parser = parserFactory.create();
updatedDataFieldsOutputTags = new HashMap<>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import org.apache.paimon.types.DataField;

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.java.typeutils.ListTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.ProcessFunction;
Expand Down Expand Up @@ -50,7 +51,16 @@ public CdcParsingProcessFunction(EventParser.Factory<T> parserFactory) {
this.parserFactory = parserFactory;
}

@Override
/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 1.18-.
 */
public void open(OpenContext openContext) throws Exception {
open(new Configuration());
}

/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 2.0+.
 */
public void open(Configuration parameters) throws Exception {
parser = parserFactory.create();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import org.apache.paimon.utils.Preconditions;
import org.apache.paimon.utils.StringUtils;

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.slf4j.Logger;
Expand Down Expand Up @@ -73,7 +74,16 @@ protected UpdatedDataFieldsProcessFunctionBase(Catalog.Loader catalogLoader) {
this.catalogLoader = catalogLoader;
}

@Override
/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 1.18-.
 */
public void open(OpenContext openContext) throws Exception {
open(new Configuration());
}

/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 2.0+.
 */
public void open(Configuration parameters) {
this.catalog = catalogLoader.load();
this.allowUpperCase = this.catalog.allowUpperCase();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import org.apache.paimon.table.FileStoreTable;
import org.apache.paimon.table.Table;

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
Expand All @@ -44,7 +45,16 @@ public QueryAddressRegister(Table table) {
this.serviceManager = ((FileStoreTable) table).store().newServiceManager();
}

@Override
/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 1.18-.
 */
public void open(OpenContext openContext) throws Exception {
open(new Configuration());
}

/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 2.0+.
 */
public void open(Configuration parameters) throws Exception {
this.executors = new TreeMap<>();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import org.apache.paimon.table.source.TableRead;
import org.apache.paimon.table.system.FileMonitorTable;

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
Expand Down Expand Up @@ -70,7 +71,16 @@ public QueryFileMonitor(Table table) {
.toMillis();
}

@Override
/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 1.18-.
 */
public void open(OpenContext openContext) throws Exception {
open(new Configuration());
}

/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink 2.0+.
 */
public void open(Configuration parameters) throws Exception {
FileMonitorTable monitorTable = new FileMonitorTable((FileStoreTable) table);
ReadBuilder readBuilder = monitorTable.newReadBuilder();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import org.apache.paimon.utils.SerializableSupplier;

import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.common.functions.Partitioner;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
Expand Down Expand Up @@ -182,9 +183,19 @@ public KeyAndSizeExtractor(RowType rowType, boolean isSortBySize) {
this.isSortBySize = isSortBySize;
}

@Override
/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink
 * 1.18-.
 */
public void open(OpenContext openContext) throws Exception {
open(new Configuration());
}

/**
 * Do not annotate with <code>@Override</code> here to maintain compatibility with Flink
 * 2.0+.
 */
public void open(Configuration parameters) throws Exception {
super.open(parameters);
InternalRowToSizeVisitor internalRowToSizeVisitor = new InternalRowToSizeVisitor();
fieldSizeCalculator =
rowType.getFieldTypes().stream()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@
import org.apache.paimon.table.FileStoreTable;
import org.apache.paimon.table.sink.SinkRecord;

import org.apache.flink.api.common.functions.Function;
import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.common.functions.RichFunction;
import org.apache.flink.api.common.functions.util.FunctionUtils;
import org.apache.flink.api.common.state.CheckpointListener;
Expand All @@ -42,6 +44,8 @@
import javax.annotation.Nullable;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Objects;

Expand Down Expand Up @@ -97,17 +101,29 @@ public void open() throws Exception {

this.sinkContext = new SimpleContext(getProcessingTimeService());
if (logSinkFunction != null) {
// to stay compatible with Flink 1.18-
if (logSinkFunction instanceof RichFunction) {
RichFunction richFunction = (RichFunction) logSinkFunction;
richFunction.open(new Configuration());
}

openFunction(logSinkFunction);
logCallback = new LogWriteCallback();
logSinkFunction.setWriteCallback(logCallback);
}
}

/**
 * Opens the given function reflectively if it is a {@link RichFunction}, preferring the newer
 * {@code open(OpenContext)} signature and falling back to the legacy
 * {@code open(Configuration)} signature when the former is unavailable.
 *
 * <p>Reflection is used (instead of a direct call) so this class links and runs against Flink
 * versions both with and without {@code RichFunction#open(OpenContext)}.
 *
 * <p>NOTE(review): {@link InvocationTargetException} is caught together with
 * {@link NoSuchMethodException}, so a genuine failure thrown from inside
 * {@code open(OpenContext)} is silently swallowed and the function is opened again via
 * {@code open(Configuration)} — confirm this double-invocation risk is acceptable.
 *
 * @param function the function to open; non-{@link RichFunction} instances are ignored
 * @throws Exception if the fallback {@code open(Configuration)} invocation fails
 */
private static void openFunction(Function function) throws Exception {
if (function instanceof RichFunction) {
RichFunction richFunction = (RichFunction) function;

try {
// Newer Flink: RichFunction declares open(OpenContext); pass an empty context.
Method method = RichFunction.class.getDeclaredMethod("open", OpenContext.class);
method.invoke(richFunction, new OpenContext() {});
return;
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
// to stay compatible with Flink 1.18-
}

// Legacy path: open(Configuration) on older Flink versions.
Method method = RichFunction.class.getDeclaredMethod("open", Configuration.class);
method.invoke(richFunction, new Configuration());
}
}

@Override
public void processWatermark(Watermark mark) throws Exception {
super.processWatermark(mark);
Expand Down
Loading

0 comments on commit 76a4134

Please sign in to comment.