MINOR: Fix a number of warnings in clients test (#8073)

Reviewers: Ismael Juma <ismael@juma.me.uk>, Andrew Choi <li_andchoi@microsoft.com>
Mickael Maison, committed by GitHub
commit 8ab0994919
14 changed files:

  1. clients/src/test/java/org/apache/kafka/clients/consumer/ConsumerConfigTest.java (4 changed lines)
  2. clients/src/test/java/org/apache/kafka/clients/consumer/internals/ConsumerCoordinatorTest.java (5 changed lines)
  3. clients/src/test/java/org/apache/kafka/clients/consumer/internals/FetcherTest.java (11 changed lines)
  4. clients/src/test/java/org/apache/kafka/clients/consumer/internals/PartitionAssignorAdapterTest.java (1 changed line)
  5. clients/src/test/java/org/apache/kafka/clients/producer/KafkaProducerTest.java (16 changed lines)
  6. clients/src/test/java/org/apache/kafka/common/config/AbstractConfigTest.java (2 changed lines)
  7. clients/src/test/java/org/apache/kafka/common/metrics/stats/FrequenciesTest.java (1 changed line)
  8. clients/src/test/java/org/apache/kafka/common/network/SslTransportLayerTest.java (1 changed line)
  9. clients/src/test/java/org/apache/kafka/common/protocol/ErrorsTest.java (2 changed lines)
  10. clients/src/test/java/org/apache/kafka/common/requests/RequestResponseTest.java (12 changed lines)
  11. clients/src/test/java/org/apache/kafka/common/security/authenticator/SaslServerAuthenticatorTest.java (3 changed lines)
  12. clients/src/test/java/org/apache/kafka/common/security/ssl/SslFactoryTest.java (9 changed lines)
  13. clients/src/test/java/org/apache/kafka/common/utils/UtilsTest.java (4 changed lines)
  14. clients/src/test/java/org/apache/kafka/test/TestUtils.java (16 changed lines)

clients/src/test/java/org/apache/kafka/clients/consumer/ConsumerConfigTest.java (4 changed lines)

@@ -29,8 +29,8 @@ import static org.junit.Assert.assertEquals;
 public class ConsumerConfigTest {
-    private final Deserializer keyDeserializer = new ByteArrayDeserializer();
-    private final Deserializer valueDeserializer = new StringDeserializer();
+    private final Deserializer<byte[]> keyDeserializer = new ByteArrayDeserializer();
+    private final Deserializer<String> valueDeserializer = new StringDeserializer();
     private final String keyDeserializerClassName = keyDeserializer.getClass().getName();
     private final String valueDeserializerClassName = valueDeserializer.getClass().getName();
     private final Object keyDeserializerClass = keyDeserializer.getClass();
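Most of the warnings fixed in this commit are rawtypes warnings: a generic type is used without a type argument, as in the raw Deserializer fields above. A minimal sketch of the pattern, assuming kafka-clients is on the classpath (the class name RawTypeSketch is made up for illustration):

import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.Deserializer;

public class RawTypeSketch {
    // Raw type: compiling with -Xlint:rawtypes warns "found raw type: Deserializer".
    static final Deserializer rawKeyDeserializer = new ByteArrayDeserializer();

    // Parameterized type: no warning, and deserialize() returns byte[] without a cast.
    static final Deserializer<byte[]> keyDeserializer = new ByteArrayDeserializer();

    public static void main(String[] args) {
        byte[] value = keyDeserializer.deserialize("topic", new byte[]{1, 2, 3});
        System.out.println(value.length); // 3
    }
}

The same parameterization fix recurs below for Future<RecordMetadata>, Producer<byte[], byte[]>, FetchResponse<MemoryRecords>, Set<Class<?>>, Map.Entry<byte[], Integer> and Class<?>.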

clients/src/test/java/org/apache/kafka/clients/consumer/internals/ConsumerCoordinatorTest.java (5 changed lines)

@@ -53,7 +53,6 @@ import org.apache.kafka.common.message.OffsetCommitResponseData;
 import org.apache.kafka.common.message.SyncGroupResponseData;
 import org.apache.kafka.common.metrics.KafkaMetric;
 import org.apache.kafka.common.metrics.Metrics;
-import org.apache.kafka.common.metrics.Sensor;
 import org.apache.kafka.common.protocol.Errors;
 import org.apache.kafka.common.record.RecordBatch;
 import org.apache.kafka.common.requests.AbstractRequest;

@@ -252,10 +251,6 @@ public class ConsumerCoordinatorTest {
         return metrics.metrics().get(metrics.metricName(name, "consumer" + groupId + "-coordinator-metrics"));
     }

-    private Sensor getSensor(final String name) {
-        return metrics.sensor(name);
-    }
-
     @Test
     public void testSelectRebalanceProtcol() {
         List<ConsumerPartitionAssignor> assignors = new ArrayList<>();

clients/src/test/java/org/apache/kafka/clients/consumer/internals/FetcherTest.java (11 changed lines)

@@ -134,7 +134,6 @@ import static org.junit.Assert.fail;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.spy;

-@SuppressWarnings("deprecation")
 public class FetcherTest {
     private static final double EPSILON = 0.0001;

@@ -2011,7 +2010,7 @@
         ClientRequest request = client.newClientRequest(node.idString(), builder, time.milliseconds(), true);
         client.send(request, time.milliseconds());
         client.poll(1, time.milliseconds());
-        FetchResponse response = fullFetchResponse(tp0, nextRecords, Errors.NONE, i, throttleTimeMs);
+        FetchResponse<MemoryRecords> response = fullFetchResponse(tp0, nextRecords, Errors.NONE, i, throttleTimeMs);
         buffer = response.serialize(ApiKeys.FETCH,
             ApiKeys.FETCH.latestVersion(),
             request.correlationId());

@@ -3104,7 +3103,7 @@
             2, 0L, null, this.records));
         partitions1.put(tp1, new FetchResponse.PartitionData<>(Errors.NONE, 100L,
             FetchResponse.INVALID_LAST_STABLE_OFFSET, 0L, null, emptyRecords));
-        FetchResponse resp1 = new FetchResponse<>(Errors.NONE, partitions1, 0, 123);
+        FetchResponse<MemoryRecords> resp1 = new FetchResponse<>(Errors.NONE, partitions1, 0, 123);
         client.prepareResponse(resp1);
         assertEquals(1, fetcher.sendFetches());
         assertFalse(fetcher.hasCompletedFetches());

@@ -3130,7 +3129,7 @@
         // The second response contains no new records.
         LinkedHashMap<TopicPartition, FetchResponse.PartitionData<MemoryRecords>> partitions2 = new LinkedHashMap<>();
-        FetchResponse resp2 = new FetchResponse<>(Errors.NONE, partitions2, 0, 123);
+        FetchResponse<MemoryRecords> resp2 = new FetchResponse<>(Errors.NONE, partitions2, 0, 123);
         client.prepareResponse(resp2);
         assertEquals(1, fetcher.sendFetches());
         consumerClient.poll(time.timer(0));

@@ -3143,7 +3142,7 @@
         LinkedHashMap<TopicPartition, FetchResponse.PartitionData<MemoryRecords>> partitions3 = new LinkedHashMap<>();
         partitions3.put(tp0, new FetchResponse.PartitionData<>(Errors.NONE, 100L,
             4, 0L, null, this.nextRecords));
-        FetchResponse resp3 = new FetchResponse<>(Errors.NONE, partitions3, 0, 123);
+        FetchResponse<MemoryRecords> resp3 = new FetchResponse<>(Errors.NONE, partitions3, 0, 123);
         client.prepareResponse(resp3);
         assertEquals(1, fetcher.sendFetches());
         consumerClient.poll(time.timer(0));

@@ -3202,7 +3201,7 @@
             }

             @Override
-            public boolean handleResponse(FetchResponse response) {
+            public boolean handleResponse(FetchResponse<?> response) {
                 verifySessionPartitions();
                 return handler.handleResponse(response);
             }
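Two variants of the fix appear in this file: where the record type is known at the construction site the declaration becomes FetchResponse<MemoryRecords>, and where an override only forwards the response the unbounded wildcard FetchResponse<?> is enough. A minimal sketch of that distinction using plain JDK types (WildcardSketch, build and describe are made-up names, not Kafka API):

import java.util.ArrayList;
import java.util.List;

public class WildcardSketch {
    // The caller knows the element type, so the declaration carries it.
    static List<String> build() {
        List<String> names = new ArrayList<>();
        names.add("tp0");
        return names;
    }

    // This method never depends on the element type, so an unbounded
    // wildcard accepts any List without a rawtypes warning.
    static int describe(List<?> anyList) {
        return anyList.size();
    }

    public static void main(String[] args) {
        System.out.println(describe(build())); // 1
    }
}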

clients/src/test/java/org/apache/kafka/clients/consumer/internals/PartitionAssignorAdapterTest.java (1 changed line)

@@ -101,7 +101,6 @@
     }

     @Test
-    @SuppressWarnings("deprecation")
     public void testOnAssignment() {
         OldPartitionAssignor oldAssignor = new OldPartitionAssignor();
         ConsumerPartitionAssignor adaptedAssignor = new PartitionAssignorAdapter(oldAssignor);
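Several files drop @SuppressWarnings("deprecation") annotations that no longer guard any deprecated call (here, on the FetcherTest class above, and again in FrequenciesTest below). A minimal sketch of why a stale suppression is worth deleting, using made-up names (SuppressionSketch, Legacy):

public class SuppressionSketch {

    static class Legacy {
        @Deprecated
        static int oldApi() {
            return 1;
        }

        static int newApi() {
            return 2;
        }
    }

    // Justified: the body really does call a deprecated method.
    @SuppressWarnings("deprecation")
    static int stillNeedsIt() {
        return Legacy.oldApi();
    }

    // Stale: nothing deprecated is used here, so the annotation would only
    // hide future deprecation warnings introduced in this method.
    static int noLongerNeedsIt() {
        return Legacy.newApi();
    }

    public static void main(String[] args) {
        System.out.println(stillNeedsIt() + noLongerNeedsIt()); // 3
    }
}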

clients/src/test/java/org/apache/kafka/clients/producer/KafkaProducerTest.java (16 changed lines)

@@ -133,8 +133,8 @@
         Properties props = new Properties();
         props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
         props.setProperty(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "transactionalId");
-        props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, new StringSerializer().getClass().getName());
-        props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, new StringSerializer().getClass().getName());
+        props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+        props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
         ProducerConfig config = new ProducerConfig(props);
         assertTrue(config.getBoolean(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG));

@@ -457,7 +457,7 @@
         // Four request updates where the topic isn't present, at which point the timeout expires and a
         // TimeoutException is thrown
-        Future future = producer.send(record);
+        Future<RecordMetadata> future = producer.send(record);
         verify(metadata, times(4)).requestUpdateForTopic(topic);
         verify(metadata, times(4)).awaitUpdate(anyInt(), anyLong());
         verify(metadata, times(5)).fetch();

@@ -533,7 +533,7 @@
         // Four request updates where the requested partition is out of range, at which point the timeout expires
         // and a TimeoutException is thrown
-        Future future = producer.send(record);
+        Future<RecordMetadata> future = producer.send(record);
         verify(metadata, times(4)).requestUpdateForTopic(topic);
         verify(metadata, times(4)).awaitUpdate(anyInt(), anyLong());
         verify(metadata, times(5)).fetch();

@@ -644,9 +644,7 @@
     private <T extends Serializer<String>> void doTestHeaders(Class<T> serializerClassToMock) {
         Map<String, Object> configs = new HashMap<>();
         configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
-        @SuppressWarnings("unchecked") // it is safe to suppress, since this is a mock class
         Serializer<String> keySerializer = mock(serializerClassToMock);
-        @SuppressWarnings("unchecked")
         Serializer<String> valueSerializer = mock(serializerClassToMock);

         long nowMs = Time.SYSTEM.milliseconds();

@@ -689,7 +687,7 @@
     public void closeShouldBeIdempotent() {
         Properties producerProps = new Properties();
         producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
-        Producer producer = new KafkaProducer<>(producerProps, new ByteArraySerializer(), new ByteArraySerializer());
+        Producer<byte[], byte[]> producer = new KafkaProducer<>(producerProps, new ByteArraySerializer(), new ByteArraySerializer());
         producer.close();
         producer.close();
     }

@@ -698,12 +696,12 @@
     public void testMetricConfigRecordingLevel() {
         Properties props = new Properties();
         props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
-        try (KafkaProducer producer = new KafkaProducer<>(props, new ByteArraySerializer(), new ByteArraySerializer())) {
+        try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(props, new ByteArraySerializer(), new ByteArraySerializer())) {
            assertEquals(Sensor.RecordingLevel.INFO, producer.metrics.config().recordLevel());
         }

         props.put(ProducerConfig.METRICS_RECORDING_LEVEL_CONFIG, "DEBUG");
-        try (KafkaProducer producer = new KafkaProducer<>(props, new ByteArraySerializer(), new ByteArraySerializer())) {
+        try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(props, new ByteArraySerializer(), new ByteArraySerializer())) {
             assertEquals(Sensor.RecordingLevel.DEBUG, producer.metrics.config().recordLevel());
         }
     }
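Besides the generics changes, the first hunk in this file replaces new StringSerializer().getClass().getName() with the class literal: when only the class name is needed for a config value, there is no reason to construct a serializer first. A minimal sketch, assuming kafka-clients on the classpath (ClassNameSketch is a made-up name):

import org.apache.kafka.common.serialization.StringSerializer;

public class ClassNameSketch {
    public static void main(String[] args) {
        // Instantiates a StringSerializer only to immediately discard it.
        String viaInstance = new StringSerializer().getClass().getName();

        // Same string, no throwaway object.
        String viaLiteral = StringSerializer.class.getName();

        System.out.println(viaInstance.equals(viaLiteral)); // true
    }
}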

clients/src/test/java/org/apache/kafka/common/config/AbstractConfigTest.java (2 changed lines)

@@ -407,7 +407,7 @@
             "org.apache.kafka.common.config.provider.InvalidConfigProvider");
         props.put("testKey", "${test:/foo/bar/testpath:testKey}");
         try {
-            TestIndirectConfigResolution config = new TestIndirectConfigResolution(props);
+            new TestIndirectConfigResolution(props);
             fail("Expected a config exception due to invalid props :" + props);
         } catch (KafkaException e) {
             // this is good

clients/src/test/java/org/apache/kafka/common/metrics/stats/FrequenciesTest.java (1 changed line)

@@ -101,7 +101,6 @@
     }

     @Test
-    @SuppressWarnings("deprecation")
     public void testUseWithMetrics() {
         MetricName name1 = name("1");
         MetricName name2 = name("2");

clients/src/test/java/org/apache/kafka/common/network/SslTransportLayerTest.java (1 changed line)

@@ -277,7 +277,6 @@
         sslServerConfigs = getTrustingConfig(serverCertStores, clientCertStores);
         sslClientConfigs = getTrustingConfig(clientCertStores, serverCertStores);
-        SecurityProtocol securityProtocol = SecurityProtocol.SSL;
         server = createEchoServer(SecurityProtocol.SSL);
         InetSocketAddress addr = new InetSocketAddress("localhost", server.port());

clients/src/test/java/org/apache/kafka/common/protocol/ErrorsTest.java (2 changed lines)

@@ -40,7 +40,7 @@
     @Test
     public void testUniqueExceptions() {
-        Set<Class> exceptionSet = new HashSet<>();
+        Set<Class<?>> exceptionSet = new HashSet<>();
         for (Errors error : Errors.values()) {
             if (error != Errors.NONE)
                 exceptionSet.add(error.exception().getClass());

clients/src/test/java/org/apache/kafka/common/requests/RequestResponseTest.java (12 changed lines)

@@ -642,7 +642,7 @@
             6, FetchResponse.INVALID_LOG_START_OFFSET, Optional.empty(), emptyList(), records));
         FetchResponse<MemoryRecords> response = new FetchResponse<>(Errors.NONE, responseData, 10, INVALID_SESSION_ID);
-        FetchResponse deserialized = FetchResponse.parse(toBuffer(response.toStruct((short) 4)), (short) 4);
+        FetchResponse<MemoryRecords> deserialized = FetchResponse.parse(toBuffer(response.toStruct((short) 4)), (short) 4);
         assertEquals(responseData, deserialized.responseData());
     }

@@ -656,7 +656,7 @@
         }
     }

-    private void verifyFetchResponseFullWrite(short apiVersion, FetchResponse fetchResponse) throws Exception {
+    private void verifyFetchResponseFullWrite(short apiVersion, FetchResponse<MemoryRecords> fetchResponse) throws Exception {
         int correlationId = 15;

         short responseHeaderVersion = FETCH.responseHeaderVersion(apiVersion);

@@ -1038,14 +1038,6 @@
             .setProtocolName("range") // Added in v5 but ignorable
             .setAssignments(assignments);

-        JoinGroupRequestData.JoinGroupRequestProtocolCollection protocols =
-            new JoinGroupRequestData.JoinGroupRequestProtocolCollection(
-                Collections.singleton(
-                    new JoinGroupRequestData.JoinGroupRequestProtocol()
-                        .setName("consumer-range")
-                        .setMetadata(new byte[0])).iterator()
-            );
-
         // v3 and above could set group instance id
         if (version >= 3)
             data.setGroupInstanceId("groupInstanceId");

clients/src/test/java/org/apache/kafka/common/security/authenticator/SaslServerAuthenticatorTest.java (3 changed lines)

@@ -31,7 +31,6 @@ import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler;
 import org.apache.kafka.common.security.auth.SecurityProtocol;
 import org.apache.kafka.common.protocol.types.Struct;
 import org.apache.kafka.common.requests.RequestHeader;
-import org.apache.kafka.common.security.JaasContext;
 import org.apache.kafka.common.security.plain.PlainLoginModule;
 import org.apache.kafka.common.utils.AppInfoParser;
 import org.apache.kafka.common.utils.Time;

@@ -152,8 +151,6 @@
                                  String mechanism, ChannelMetadataRegistry metadataRegistry) throws IOException {
         TestJaasConfig jaasConfig = new TestJaasConfig();
         jaasConfig.addEntry("jaasContext", PlainLoginModule.class.getName(), new HashMap<String, Object>());
-        Map<String, JaasContext> jaasContexts = Collections.singletonMap(mechanism,
-                new JaasContext("jaasContext", JaasContext.Type.SERVER, jaasConfig, null));
         Map<String, Subject> subjects = Collections.singletonMap(mechanism, new Subject());
         Map<String, AuthenticateCallbackHandler> callbackHandlers = Collections.singletonMap(
                 mechanism, new SaslServerCallbackHandler());

clients/src/test/java/org/apache/kafka/common/security/ssl/SslFactoryTest.java (9 changed lines)

@@ -376,15 +376,6 @@
         );
     }

-    private SslEngineBuilder.SecurityStore sslTrustStore(Map<String, Object> sslConfig) {
-        return new SslEngineBuilder.SecurityStore(
-            (String) sslConfig.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG),
-            (String) sslConfig.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG),
-            (Password) sslConfig.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG),
-            null
-        );
-    }
-
     private TestSslUtils.SslConfigsBuilder sslConfigsBuilder(Mode mode) {
         return new TestSslUtils.SslConfigsBuilder(mode).tlsProtocol(tlsProtocol);
     }

clients/src/test/java/org/apache/kafka/common/utils/UtilsTest.java (4 changed lines)

@@ -72,8 +72,8 @@
         cases.put("lkjh234lh9fiuh90y23oiuhsafujhadof229phr9h19h89h8".getBytes(), -58897971);
         cases.put(new byte[]{'a', 'b', 'c'}, 479470107);

-        for (Map.Entry c : cases.entrySet()) {
-            assertEquals((int) c.getValue(), murmur2((byte[]) c.getKey()));
+        for (Map.Entry<byte[], Integer> c : cases.entrySet()) {
+            assertEquals(c.getValue().intValue(), murmur2(c.getKey()));
         }
     }
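Parameterizing the Map.Entry in the loop above removes both the rawtypes warning and the manual (byte[]) and (int) casts, since the compiler now knows the key and value types. A self-contained sketch of the same pattern with plain JDK types (TypedEntrySketch and checksum are made-up stand-ins, not Kafka's murmur2):

import java.util.HashMap;
import java.util.Map;

public class TypedEntrySketch {
    // Stand-in hash function for the example.
    static int checksum(byte[] data) {
        int sum = 0;
        for (byte b : data) {
            sum = 31 * sum + b;
        }
        return sum;
    }

    public static void main(String[] args) {
        Map<byte[], Integer> cases = new HashMap<>();
        cases.put(new byte[]{'a', 'b', 'c'}, checksum(new byte[]{'a', 'b', 'c'}));

        // A raw Map.Entry would force casts on getKey()/getValue();
        // the parameterized entry returns the typed values directly.
        for (Map.Entry<byte[], Integer> c : cases.entrySet()) {
            if (checksum(c.getKey()) != c.getValue().intValue()) {
                throw new AssertionError("checksum mismatch");
            }
        }
        System.out.println("all checksums match");
    }
}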

clients/src/test/java/org/apache/kafka/test/TestUtils.java (16 changed lines)

@@ -276,8 +276,8 @@
     }

     public static Properties producerConfig(final String bootstrapServers,
-                                            final Class keySerializer,
-                                            final Class valueSerializer,
+                                            final Class<?> keySerializer,
+                                            final Class<?> valueSerializer,
                                             final Properties additional) {
         final Properties properties = new Properties();
         properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

@@ -288,14 +288,14 @@
         return properties;
     }

-    public static Properties producerConfig(final String bootstrapServers, final Class keySerializer, final Class valueSerializer) {
+    public static Properties producerConfig(final String bootstrapServers, final Class<?> keySerializer, final Class<?> valueSerializer) {
         return producerConfig(bootstrapServers, keySerializer, valueSerializer, new Properties());
     }

     public static Properties consumerConfig(final String bootstrapServers,
                                             final String groupId,
-                                            final Class keyDeserializer,
-                                            final Class valueDeserializer,
+                                            final Class<?> keyDeserializer,
+                                            final Class<?> valueDeserializer,
                                             final Properties additional) {
         final Properties consumerConfig = new Properties();

@@ -310,8 +310,8 @@
     public static Properties consumerConfig(final String bootstrapServers,
                                             final String groupId,
-                                            final Class keyDeserializer,
-                                            final Class valueDeserializer) {
+                                            final Class<?> keyDeserializer,
+                                            final Class<?> valueDeserializer) {
         return consumerConfig(bootstrapServers,
             groupId,
             keyDeserializer,

@@ -322,7 +322,7 @@
     /**
     * returns consumer config with random UUID for the Group ID
     */
-    public static Properties consumerConfig(final String bootstrapServers, final Class keyDeserializer, final Class valueDeserializer) {
+    public static Properties consumerConfig(final String bootstrapServers, final Class<?> keyDeserializer, final Class<?> valueDeserializer) {
         return consumerConfig(bootstrapServers,
             UUID.randomUUID().toString(),
             keyDeserializer,
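Widening the raw Class parameters in TestUtils to Class<?> silences the rawtypes warning without touching any caller, because every class literal is already a Class<?>. A small sketch with plain JDK types (ClassParameterSketch and configWith are made-up names, not the TestUtils API):

import java.util.Properties;

public class ClassParameterSketch {
    // Class<?> accepts any class literal; a raw Class parameter would
    // trigger a rawtypes warning at the declaration instead.
    static Properties configWith(String key, Class<?> serializerClass) {
        Properties props = new Properties();
        props.put(key, serializerClass.getName());
        return props;
    }

    public static void main(String[] args) {
        System.out.println(configWith("key.serializer", String.class));
        System.out.println(configWith("value.serializer", byte[].class));
    }
}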
