Skip to content

Commit c4bcb02

Browse files
Oleksii Moskalenko, Wirick, and woop
authored
Integration Test for Job Coordinator (#886)
* BaseIT & JobCoordinatorIT * it action * check fail * --no-transfer-progress * pr comments * comments for BaseIT * comments for BaseIT * Update POM to pass maven verify * Update make command to support verify for integration tests * skip unit tests in e2e * skip unit tests in docker build * build verbose * run exec.jar * serving docker build w/o tests * using exec jar in docker * unpack exec jar * unpack exec jar * pr comments * clean collector registry Co-authored-by: Christopher Wirick <crwirick@gmail.com> Co-authored-by: Willem Pienaar <git@willem.co>
1 parent 554324d commit c4bcb02

File tree

20 files changed

+1013
-28
lines changed

20 files changed

+1013
-28
lines changed
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
name: integration tests
2+
on: [push, pull_request]
3+
jobs:
4+
maven-integration-test:
5+
runs-on: ubuntu-latest
6+
name: Maven Integration Test
7+
steps:
8+
- uses: actions/checkout@v2
9+
- name: Set up JDK 1.8
10+
uses: actions/setup-java@v1
11+
with:
12+
java-version: '11'
13+
java-package: jdk
14+
architecture: x64
15+
- name: Run integration tests
16+
run: make test-java-integration

Makefile

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,9 @@ lint-java:
4646
test-java:
4747
mvn --no-transfer-progress test
4848

49+
test-java-integration:
50+
mvn --no-transfer-progress -Dmaven.javadoc.skip=true -Dgpg.skip -DskipUTs=true clean verify
51+
4952
test-java-with-coverage:
5053
mvn --no-transfer-progress test jacoco:report-aggregate
5154

@@ -175,4 +178,4 @@ build-html: clean-html
175178

176179
# Versions
177180
lint-versions:
178-
./infra/scripts/validate-version-consistency.sh
181+
./infra/scripts/validate-version-consistency.sh

core/pom.xml

Lines changed: 43 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -312,8 +312,50 @@
312312
<dependency>
313313
<groupId>org.springframework</groupId>
314314
<artifactId>spring-test</artifactId>
315-
<version>5.1.3.RELEASE</version>
315+
<version>5.2.5.RELEASE</version>
316316
<scope>test</scope>
317317
</dependency>
318+
<dependency>
319+
<groupId>org.junit.jupiter</groupId>
320+
<artifactId>junit-jupiter</artifactId>
321+
<version>5.6.2</version>
322+
<scope>test</scope>
323+
</dependency>
324+
<dependency>
325+
<groupId>org.testcontainers</groupId>
326+
<artifactId>testcontainers</artifactId>
327+
<version>1.14.3</version>
328+
<scope>test</scope>
329+
</dependency>
330+
<dependency>
331+
<groupId>org.testcontainers</groupId>
332+
<artifactId>junit-jupiter</artifactId>
333+
<version>1.14.3</version>
334+
<scope>test</scope>
335+
</dependency>
336+
<dependency>
337+
<groupId>org.testcontainers</groupId>
338+
<artifactId>postgresql</artifactId>
339+
<version>1.14.3</version>
340+
<scope>test</scope>
341+
</dependency>
342+
<dependency>
343+
<groupId>org.testcontainers</groupId>
344+
<artifactId>kafka</artifactId>
345+
<version>1.14.3</version>
346+
<scope>test</scope>
347+
</dependency>
348+
<dependency>
349+
<groupId>org.awaitility</groupId>
350+
<artifactId>awaitility</artifactId>
351+
<version>3.0.0</version>
352+
<scope>test</scope>
353+
</dependency>
354+
<dependency>
355+
<groupId>org.awaitility</groupId>
356+
<artifactId>awaitility-proxy</artifactId>
357+
<version>3.0.0</version>
358+
<scope>test</scope>
359+
</dependency>
318360
</dependencies>
319361
</project>

core/src/main/java/feast/core/config/FeatureStreamConfig.java

Lines changed: 37 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,10 @@
3737
import org.springframework.beans.factory.annotation.Autowired;
3838
import org.springframework.context.annotation.Bean;
3939
import org.springframework.context.annotation.Configuration;
40+
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
41+
import org.springframework.kafka.config.KafkaListenerContainerFactory;
4042
import org.springframework.kafka.core.*;
43+
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
4144

4245
@Slf4j
4346
@Configuration
@@ -94,6 +97,13 @@ public NewTopic featureSetSpecsAckTopic(FeastProperties feastProperties) {
9497
(short) 1);
9598
}
9699

100+
/**
101+
* Creates kafka publisher for sending FeatureSetSpecs to ingestion job. Uses ProtoSerializer to
102+
* serialize FeatureSetSpec.
103+
*
104+
* @param feastProperties
105+
* @return
106+
*/
97107
@Bean
98108
public KafkaTemplate<String, FeatureSetProto.FeatureSetSpec> specKafkaTemplate(
99109
FeastProperties feastProperties) {
@@ -112,8 +122,34 @@ public KafkaTemplate<String, FeatureSetProto.FeatureSetSpec> specKafkaTemplate(
112122
return t;
113123
}
114124

125+
/**
126+
* Set configured consumerFactory for specs acknowledgment topic (see ackConsumerFactory) as
127+
* default for KafkaListener.
128+
*
129+
* @param consumerFactory
130+
* @return
131+
*/
115132
@Bean
116-
public ConsumerFactory<?, ?> ackConsumerFactory(FeastProperties feastProperties) {
133+
KafkaListenerContainerFactory<
134+
ConcurrentMessageListenerContainer<String, IngestionJobProto.FeatureSetSpecAck>>
135+
kafkaAckListenerContainerFactory(
136+
ConsumerFactory<String, IngestionJobProto.FeatureSetSpecAck> consumerFactory) {
137+
ConcurrentKafkaListenerContainerFactory<String, IngestionJobProto.FeatureSetSpecAck> factory =
138+
new ConcurrentKafkaListenerContainerFactory<>();
139+
factory.setConsumerFactory(consumerFactory);
140+
return factory;
141+
}
142+
143+
/**
144+
* Prepares kafka consumer (by configuring ConsumerFactory) to receive acknowledgments from
145+
* IngestionJob on successful updates of FeatureSetSpecs.
146+
*
147+
* @param feastProperties
148+
* @return ConsumerFactory for FeatureSetSpecAck
149+
*/
150+
@Bean
151+
public ConsumerFactory<String, IngestionJobProto.FeatureSetSpecAck> ackConsumerFactory(
152+
FeastProperties feastProperties) {
117153
StreamProperties streamProperties = feastProperties.getStream();
118154
Map<String, Object> props = new HashMap<>();
119155

core/src/main/java/feast/core/config/JobConfig.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434
import feast.proto.core.SourceProto;
3535
import java.util.Map;
3636
import lombok.extern.slf4j.Slf4j;
37-
import org.springframework.beans.factory.annotation.Autowired;
37+
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
3838
import org.springframework.context.annotation.Bean;
3939
import org.springframework.context.annotation.Configuration;
4040

@@ -94,7 +94,7 @@ public JobGroupingStrategy getJobGroupingStrategy(
9494
* @param feastProperties feast config properties
9595
*/
9696
@Bean
97-
@Autowired
97+
@ConditionalOnMissingBean
9898
public JobManager getJobManager(
9999
FeastProperties feastProperties,
100100
IngestionJobProto.SpecsStreamingUpdateConfig specsStreamingUpdateConfig)

core/src/main/java/feast/core/model/AbstractTimestampEntity.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ public abstract class AbstractTimestampEntity {
3333
private Date created;
3434

3535
@Temporal(TemporalType.TIMESTAMP)
36-
@Column(name = "lastUpdated", nullable = false)
36+
@Column(name = "last_updated", nullable = false)
3737
private Date lastUpdated;
3838

3939
@PrePersist

core/src/main/java/feast/core/service/JobCoordinatorService.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -408,7 +408,9 @@ public void notifyJobsWhenFeatureSetUpdated() {
408408
*
409409
* @param record ConsumerRecord with key: FeatureSet reference and value: Ack message
410410
*/
411-
@KafkaListener(topics = {"${feast.stream.specsOptions.specsAckTopic}"})
411+
@KafkaListener(
412+
topics = {"${feast.stream.specsOptions.specsAckTopic}"},
413+
containerFactory = "kafkaAckListenerContainerFactory")
412414
@Transactional
413415
public void listenAckFromJobs(
414416
ConsumerRecord<String, IngestionJobProto.FeatureSetSpecAck> record) {
Lines changed: 202 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,202 @@
1+
/*
2+
* SPDX-License-Identifier: Apache-2.0
3+
* Copyright 2018-2020 The Feast Authors
4+
*
5+
* Licensed under the Apache License, Version 2.0 (the "License");
6+
* you may not use this file except in compliance with the License.
7+
* You may obtain a copy of the License at
8+
*
9+
* https://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
package feast.core.it;
18+
19+
import feast.core.config.FeastProperties;
20+
import feast.core.util.KafkaSerialization;
21+
import feast.proto.core.IngestionJobProto;
22+
import io.prometheus.client.CollectorRegistry;
23+
import java.sql.Connection;
24+
import java.sql.SQLException;
25+
import java.sql.Statement;
26+
import java.util.HashMap;
27+
import java.util.List;
28+
import java.util.Map;
29+
import java.util.stream.Collectors;
30+
import javax.persistence.EntityManager;
31+
import javax.persistence.PersistenceContext;
32+
import javax.persistence.Table;
33+
import org.apache.kafka.clients.consumer.ConsumerConfig;
34+
import org.apache.kafka.clients.producer.ProducerConfig;
35+
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
36+
import org.apache.kafka.common.serialization.StringDeserializer;
37+
import org.apache.kafka.common.serialization.StringSerializer;
38+
import org.hibernate.engine.spi.SessionImplementor;
39+
import org.junit.jupiter.api.*;
40+
import org.springframework.boot.test.context.SpringBootTest;
41+
import org.springframework.context.annotation.Bean;
42+
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
43+
import org.springframework.kafka.config.KafkaListenerContainerFactory;
44+
import org.springframework.kafka.core.ConsumerFactory;
45+
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
46+
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
47+
import org.springframework.kafka.core.KafkaTemplate;
48+
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
49+
import org.springframework.test.annotation.DirtiesContext;
50+
import org.springframework.test.context.ActiveProfiles;
51+
import org.springframework.test.context.DynamicPropertyRegistry;
52+
import org.springframework.test.context.DynamicPropertySource;
53+
import org.testcontainers.containers.KafkaContainer;
54+
import org.testcontainers.containers.PostgreSQLContainer;
55+
import org.testcontainers.junit.jupiter.Container;
56+
import org.testcontainers.junit.jupiter.Testcontainers;
57+
58+
/**
59+
* Base Integration Test class. Setups postgres and kafka containers. Configures related properties
60+
* and beans. Provides DB related clean up between tests.
61+
*/
62+
@SpringBootTest
63+
@ActiveProfiles("it")
64+
@Testcontainers
65+
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
66+
public class BaseIT {
67+
68+
@Container public static PostgreSQLContainer<?> postgreSQLContainer = new PostgreSQLContainer<>();
69+
70+
@Container public static KafkaContainer kafka = new KafkaContainer();
71+
72+
/**
73+
* Configure Spring Application to use postgres and kafka rolled out in containers
74+
*
75+
* @param registry
76+
*/
77+
@DynamicPropertySource
78+
static void properties(DynamicPropertyRegistry registry) {
79+
80+
registry.add("spring.datasource.url", postgreSQLContainer::getJdbcUrl);
81+
registry.add("spring.datasource.username", postgreSQLContainer::getUsername);
82+
registry.add("spring.datasource.password", postgreSQLContainer::getPassword);
83+
registry.add("spring.jpa.hibernate.ddl-auto", () -> "none");
84+
85+
registry.add("feast.stream.options.bootstrapServers", kafka::getBootstrapServers);
86+
}
87+
88+
/**
89+
* SequentialFlow is base class that is supposed to be inherited by @Nested test classes that
90+
* wants to preserve context between test cases. For SequentialFlow databases is being truncated
91+
* only once after all tests passed.
92+
*/
93+
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
94+
public class SequentialFlow {
95+
@AfterAll
96+
public void tearDown() throws Exception {
97+
cleanTables(entityManager);
98+
}
99+
}
100+
101+
/**
102+
* This class must be inherited inside IT Class and annotated with {@link
103+
* org.springframework.boot.test.context.TestConfiguration}. It provides configuration needed to
104+
* communicate with Feast via Kafka
105+
*/
106+
public static class BaseTestConfig {
107+
@Bean
108+
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, byte[]>>
109+
testListenerContainerFactory(ConsumerFactory<String, byte[]> consumerFactory) {
110+
ConcurrentKafkaListenerContainerFactory<String, byte[]> factory =
111+
new ConcurrentKafkaListenerContainerFactory<>();
112+
factory.setConsumerFactory(consumerFactory);
113+
return factory;
114+
}
115+
116+
@Bean
117+
public ConsumerFactory<String, byte[]> testConsumerFactory() {
118+
Map<String, Object> props = new HashMap<>();
119+
120+
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
121+
props.put(ConsumerConfig.GROUP_ID_CONFIG, "test");
122+
123+
return new DefaultKafkaConsumerFactory<>(
124+
props, new StringDeserializer(), new ByteArrayDeserializer());
125+
}
126+
127+
@Bean
128+
public KafkaTemplate<String, IngestionJobProto.FeatureSetSpecAck> specAckKafkaTemplate(
129+
FeastProperties feastProperties) {
130+
FeastProperties.StreamProperties streamProperties = feastProperties.getStream();
131+
Map<String, Object> props = new HashMap<>();
132+
133+
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
134+
135+
KafkaTemplate<String, IngestionJobProto.FeatureSetSpecAck> t =
136+
new KafkaTemplate<>(
137+
new DefaultKafkaProducerFactory<>(
138+
props, new StringSerializer(), new KafkaSerialization.ProtoSerializer<>()));
139+
t.setDefaultTopic(streamProperties.getSpecsOptions().getSpecsAckTopic());
140+
return t;
141+
}
142+
}
143+
144+
/**
145+
* Truncates all tables in Database (between tests or flows). Retries on deadlock
146+
*
147+
* @param em EntityManager
148+
* @throws SQLException
149+
*/
150+
public static void cleanTables(EntityManager em) throws SQLException {
151+
List<String> tableNames =
152+
em.getMetamodel().getEntities().stream()
153+
.map(e -> e.getJavaType().getAnnotation(Table.class).name())
154+
.collect(Collectors.toList());
155+
156+
// this trick needed to get EntityManager with Transaction
157+
// and we don't want to wrap whole class into @Transactional
158+
em = em.getEntityManagerFactory().createEntityManager();
159+
// Transaction needed only once to do unwrap
160+
SessionImplementor session = em.unwrap(SessionImplementor.class);
161+
162+
// and here we're actually don't want any transactions
163+
// but instead we pulling raw connection
164+
// to be able to retry query if needed
165+
// since retrying rollbacked transaction is not that easy
166+
Connection connection = session.connection();
167+
168+
// retries are needed since truncate require exclusive lock
169+
// and that often leads to Deadlock
170+
// since SpringApp is still running in another thread
171+
var num_retries = 5;
172+
for (var i = 1; i <= num_retries; i++) {
173+
try {
174+
Statement statement = connection.createStatement();
175+
statement.execute(String.format("truncate %s cascade", String.join(", ", tableNames)));
176+
} catch (SQLException e) {
177+
if (i == num_retries) {
178+
throw e;
179+
}
180+
continue;
181+
}
182+
183+
break;
184+
}
185+
}
186+
187+
@PersistenceContext EntityManager entityManager;
188+
189+
/** Used to determine SequentialFlows */
190+
public Boolean isNestedTest(TestInfo testInfo) {
191+
return testInfo.getTestClass().get().getAnnotation(Nested.class) != null;
192+
}
193+
194+
@AfterEach
195+
public void tearDown(TestInfo testInfo) throws Exception {
196+
CollectorRegistry.defaultRegistry.clear();
197+
198+
if (!isNestedTest(testInfo)) {
199+
cleanTables(entityManager);
200+
}
201+
}
202+
}

0 commit comments

Comments
 (0)