Skip to content

Commit 47b6d03

Browse files
committed
reformat using the google-java-format plugin
1 parent f019e61 commit 47b6d03

File tree

3 files changed

+105
-119
lines changed

3 files changed

+105
-119
lines changed

ingestion/src/main/java/feast/ingestion/deserializer/FeatureRowDeserializer.java

Lines changed: 13 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -2,32 +2,29 @@
22

33
import com.google.protobuf.InvalidProtocolBufferException;
44
import feast.types.FeatureRowProto.FeatureRow;
5+
import java.util.Map;
56
import org.apache.kafka.common.errors.SerializationException;
67
import org.apache.kafka.common.serialization.Deserializer;
78

8-
import java.util.Map;
9-
109
/**
1110
* Deserializer for Kafka to deserialize Protocol Buffers messages
1211
*
1312
* @param <FeatureRow> Protobuf message type
1413
*/
1514
public class FeatureRowDeserializer implements Deserializer<FeatureRow> {
1615

17-
@Override
18-
public void configure(Map configs, boolean isKey) {
19-
}
16+
@Override
17+
public void configure(Map configs, boolean isKey) {}
2018

21-
@Override
22-
public FeatureRow deserialize(String topic, byte[] data) {
23-
try {
24-
return FeatureRow.parseFrom(data);
25-
} catch (InvalidProtocolBufferException e) {
26-
throw new SerializationException("Error deserializing FeatureRow from Protobuf message", e);
27-
}
19+
@Override
20+
public FeatureRow deserialize(String topic, byte[] data) {
21+
try {
22+
return FeatureRow.parseFrom(data);
23+
} catch (InvalidProtocolBufferException e) {
24+
throw new SerializationException("Error deserializing FeatureRow from Protobuf message", e);
2825
}
26+
}
2927

30-
@Override
31-
public void close() {
32-
}
33-
}
28+
@Override
29+
public void close() {}
30+
}
Lines changed: 15 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,31 @@
11
package feast.ingestion.deserializer;
22

33
import com.google.protobuf.InvalidProtocolBufferException;
4-
import feast.types.FeatureRowProto.*;
4+
import feast.types.FeatureRowProto.FeatureRowKey;
5+
import java.util.Map;
56
import org.apache.kafka.common.errors.SerializationException;
67
import org.apache.kafka.common.serialization.Deserializer;
78

8-
import java.util.Map;
9-
109
/**
1110
* Deserializer for Kafka to deserialize Protocol Buffers messages
1211
*
1312
* @param <FeatureRowKey> Protobuf message type
1413
*/
1514
public class FeatureRowKeyDeserializer implements Deserializer<FeatureRowKey> {
1615

17-
@Override
18-
public void configure(Map configs, boolean isKey) {
19-
}
16+
@Override
17+
public void configure(Map configs, boolean isKey) {}
2018

21-
@Override
22-
public FeatureRowKey deserialize(String topic, byte[] data) {
23-
try {
24-
return FeatureRowKey.parseFrom(data);
25-
} catch (InvalidProtocolBufferException e) {
26-
throw new SerializationException("Error deserializing FeatureRowKey from Protobuf message", e);
27-
}
19+
@Override
20+
public FeatureRowKey deserialize(String topic, byte[] data) {
21+
try {
22+
return FeatureRowKey.parseFrom(data);
23+
} catch (InvalidProtocolBufferException e) {
24+
throw new SerializationException(
25+
"Error deserializing FeatureRowKey from Protobuf message", e);
2826
}
27+
}
2928

30-
@Override
31-
public void close() {
32-
}
33-
}
29+
@Override
30+
public void close() {}
31+
}
Lines changed: 77 additions & 86 deletions
Original file line numberDiff line numberDiff line change
package feast.ingestion.deserializer;

// NOTE(review): import lines hidden between diff hunks were reconstructed from usage
// (Assert, Test, RunWith, ByteArraySerializer, Deserializer, SendResult, EmbeddedKafka,
// EmbeddedKafkaBroker, KafkaTestUtils, ContainerTestUtils) — verify against the full file.
import com.google.protobuf.MessageLite;
import feast.types.FeatureRowProto.FeatureRow;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.listener.MessageListenerContainer;
import org.springframework.kafka.support.SendResult;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.concurrent.ListenableFuture;

/**
 * Round-trip test for {@link FeatureRowDeserializer}: serializes a protobuf message, publishes it
 * to an embedded Kafka broker as both record key and value, consumes it back through the
 * deserializer, and asserts the decoded message equals the original.
 */
@RunWith(SpringRunner.class)
@EmbeddedKafka(controlledShutdown = true)
public class KafkaFeatureRowDeserializerTest {

  @Autowired private EmbeddedKafkaBroker embeddedKafka;
  @Autowired private KafkaTemplate<byte[], byte[]> template;

  /**
   * Publishes {@code input} to a fresh topic and verifies the consumed key and value both
   * deserialize back to {@code input}.
   *
   * @param input protobuf message used as both the record key and value
   */
  private <MessageType extends MessageLite> void deserialize(MessageType input) {
    // A random UUID gives each test run a unique topic and consumer group id, so runs on the
    // shared embedded broker cannot interfere with each other.
    String uuid = UUID.randomUUID().toString();
    String topic = "topic-" + uuid;

    embeddedKafka.addTopics(topic);

    Deserializer<FeatureRow> deserializer = new FeatureRowDeserializer();

    Map<String, Object> consumerProps =
        KafkaTestUtils.consumerProps(uuid, Boolean.FALSE.toString(), embeddedKafka);
    ConsumerFactory<FeatureRow, FeatureRow> consumerFactory =
        new DefaultKafkaConsumerFactory<>(consumerProps, deserializer, deserializer);

    // Consumed records are handed off to the test thread through a blocking queue.
    BlockingQueue<ConsumerRecord<FeatureRow, FeatureRow>> records = new LinkedBlockingQueue<>();
    ContainerProperties containerProps = new ContainerProperties(topic);
    containerProps.setMessageListener((MessageListener<FeatureRow, FeatureRow>) records::add);

    MessageListenerContainer container =
        new KafkaMessageListenerContainer<>(consumerFactory, containerProps);
    container.start();
    // Block until the consumer is assigned all partitions, so the send below is not missed.
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    byte[] data = input.toByteArray();
    ProducerRecord<byte[], byte[]> producerRecord = new ProducerRecord<>(topic, data, data);
    ListenableFuture<SendResult<byte[], byte[]>> producerFuture = template.send(producerRecord);

    try {
      producerFuture.get();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // restore the interrupt flag before bailing out
      return;
    } catch (ExecutionException e) {
      throw new KafkaException("Error sending message to Kafka.", e.getCause());
    }

    ConsumerRecord<FeatureRow, FeatureRow> consumerRecord;
    try {
      consumerRecord = records.take();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // restore the interrupt flag before bailing out
      return;
    }

    // Assert.assertEquals takes (expected, actual); input is the expected message.
    FeatureRow key = consumerRecord.key();
    Assert.assertEquals(input, key);

    FeatureRow value = consumerRecord.value();
    Assert.assertEquals(input, value);
  }

  @Test(timeout = 10000)
  public void deserializeFeatureRowProto() {
    FeatureRow message = FeatureRow.newBuilder().setEntityName("test").build();
    deserialize(message);
  }

  /** Spring context supplying the byte-array producer used to publish raw protobuf payloads. */
  @Configuration
  static class ContextConfiguration {

    @Autowired private EmbeddedKafkaBroker embeddedKafka;

    @Bean
    ProducerFactory<byte[], byte[]> producerFactory() {
      Map<String, Object> producerProps = KafkaTestUtils.producerProps(embeddedKafka);

      return new DefaultKafkaProducerFactory<>(
          producerProps, new ByteArraySerializer(), new ByteArraySerializer());
    }

    @Bean
    KafkaTemplate<byte[], byte[]> kafkaTemplate() {
      // autoFlush=true so template.send() completes synchronously enough for the test.
      return new KafkaTemplate<>(producerFactory(), true);
    }
  }
}

0 commit comments

Comments
 (0)