Skip to content

Commit

Permalink
[FLINK-36832] Remove Deprecated class and relevant tests.
Browse files Browse the repository at this point in the history
  • Loading branch information
lvyanquan committed Dec 3, 2024
1 parent 59baacc commit 1f586c9
Show file tree
Hide file tree
Showing 88 changed files with 57 additions and 21,141 deletions.
Original file line number Diff line number Diff line change
@@ -1 +0,0 @@
Method <org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer.getTransactionCoordinatorId()> calls method <org.apache.flink.streaming.connectors.kafka.internals.FlinkKafkaInternalProducer.getTransactionCoordinatorId()> in (FlinkKafkaProducer.java:1327)

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema.open(org.apache.flink.api.common.serialization.SerializationSchema$InitializationContext, org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema$KafkaSinkContext): Argument leaf type org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema$KafkaSinkContext does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema.serialize(java.lang.Object, org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema$KafkaSinkContext, java.lang.Long): Argument leaf type org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema$KafkaSinkContext does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer.getPartitionOffsets(java.util.Collection, org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer$PartitionOffsetsRetriever): Argument leaf type org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer$PartitionOffsetsRetriever does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer.invoke(org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer$KafkaTransactionState, java.lang.Object, org.apache.flink.streaming.api.functions.sink.SinkFunction$Context): Argument leaf type org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer$KafkaTransactionState does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition.dropLeaderData(java.util.List): Argument leaf type org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartitionLeader does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@ org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema.open(org.ap
org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema.serialize(java.lang.Object, org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema$KafkaSinkContext, java.lang.Long): Argument leaf type org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema$KafkaSinkContext does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer.getPartitionOffsets(java.util.Collection, org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer$PartitionOffsetsRetriever): Argument leaf type org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer$PartitionOffsetsRetriever does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer.invoke(org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer$KafkaTransactionState, java.lang.Object, org.apache.flink.streaming.api.functions.sink.SinkFunction$Context): Argument leaf type org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer$KafkaTransactionState does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition.dropLeaderData(java.util.List): Argument leaf type org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartitionLeader does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition.dropLeaderData(java.util.List): Argument leaf type org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartitionLeader does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import org.apache.flink.connector.kafka.lineage.TypeDatasetFacet;
import org.apache.flink.connector.kafka.lineage.TypeDatasetFacetProvider;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;

import com.google.common.reflect.TypeToken;
import org.apache.kafka.clients.producer.ProducerRecord;
Expand Down Expand Up @@ -103,20 +102,6 @@ public class KafkaRecordSerializationSchemaBuilder<IN> {
@Nullable private SerializationSchema<? super IN> keySerializationSchema;
@Nullable private HeaderProvider<? super IN> headerProvider;

/**
 * Sets a custom partitioner determining the target partition of the target topic.
 *
 * @param partitioner the partitioner used to assign each record to a Kafka partition; must
 *     not be {@code null}
 * @return {@code this}
 * @deprecated use {@link #setPartitioner(KafkaPartitioner)}
 */
// NOTE: the @Deprecated annotation must accompany the @deprecated Javadoc tag, otherwise
// javac emits a dep-ann warning and callers get no compile-time deprecation notice.
@Deprecated
public <T extends IN> KafkaRecordSerializationSchemaBuilder<T> setPartitioner(
        FlinkKafkaPartitioner<? super T> partitioner) {
    KafkaRecordSerializationSchemaBuilder<T> self = self();
    self.partitioner = checkNotNull(partitioner);
    return self;
}

/**
* Sets a custom partitioner determining the target partition of the target topic.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,20 +98,6 @@ public KafkaSinkBuilder<IN> setDeliveryGuarantee(DeliveryGuarantee deliveryGuara
return this;
}

/**
 * Sets the wanted {@link DeliveryGuarantee}. The default delivery guarantee is {@link
 * #deliveryGuarantee}.
 *
 * @param deliveryGuarantee the delivery guarantee to use; must not be {@code null}
 * @return {@link KafkaSinkBuilder}
 * @deprecated Will be removed in future versions. Use {@link #setDeliveryGuarantee} instead.
 */
@Deprecated
public KafkaSinkBuilder<IN> setDeliverGuarantee(DeliveryGuarantee deliveryGuarantee) {
    this.deliveryGuarantee = checkNotNull(deliveryGuarantee, "deliveryGuarantee");
    return this;
}

/**
* Sets the configuration which used to instantiate all used {@link
* org.apache.kafka.clients.producer.KafkaProducer}.
Expand Down
Loading

0 comments on commit 1f586c9

Please sign in to comment.