|
| 1 | +/* |
| 2 | + * SPDX-License-Identifier: Apache-2.0 |
| 3 | + * Copyright 2018-2020 The Feast Authors |
| 4 | + * |
| 5 | + * Licensed under the Apache License, Version 2.0 (the "License"); |
| 6 | + * you may not use this file except in compliance with the License. |
| 7 | + * You may obtain a copy of the License at |
| 8 | + * |
| 9 | + * https://www.apache.org/licenses/LICENSE-2.0 |
| 10 | + * |
| 11 | + * Unless required by applicable law or agreed to in writing, software |
| 12 | + * distributed under the License is distributed on an "AS IS" BASIS, |
| 13 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 14 | + * See the License for the specific language governing permissions and |
| 15 | + * limitations under the License. |
| 16 | + */ |
| 17 | +package feast.core.it; |
| 18 | + |
| 19 | +import feast.core.config.FeastProperties; |
| 20 | +import feast.core.util.KafkaSerialization; |
| 21 | +import feast.proto.core.IngestionJobProto; |
| 22 | +import io.prometheus.client.CollectorRegistry; |
| 23 | +import java.sql.Connection; |
| 24 | +import java.sql.SQLException; |
| 25 | +import java.sql.Statement; |
| 26 | +import java.util.HashMap; |
| 27 | +import java.util.List; |
| 28 | +import java.util.Map; |
| 29 | +import java.util.stream.Collectors; |
| 30 | +import javax.persistence.EntityManager; |
| 31 | +import javax.persistence.PersistenceContext; |
| 32 | +import javax.persistence.Table; |
| 33 | +import org.apache.kafka.clients.consumer.ConsumerConfig; |
| 34 | +import org.apache.kafka.clients.producer.ProducerConfig; |
| 35 | +import org.apache.kafka.common.serialization.ByteArrayDeserializer; |
| 36 | +import org.apache.kafka.common.serialization.StringDeserializer; |
| 37 | +import org.apache.kafka.common.serialization.StringSerializer; |
| 38 | +import org.hibernate.engine.spi.SessionImplementor; |
| 39 | +import org.junit.jupiter.api.*; |
| 40 | +import org.springframework.boot.test.context.SpringBootTest; |
| 41 | +import org.springframework.context.annotation.Bean; |
| 42 | +import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; |
| 43 | +import org.springframework.kafka.config.KafkaListenerContainerFactory; |
| 44 | +import org.springframework.kafka.core.ConsumerFactory; |
| 45 | +import org.springframework.kafka.core.DefaultKafkaConsumerFactory; |
| 46 | +import org.springframework.kafka.core.DefaultKafkaProducerFactory; |
| 47 | +import org.springframework.kafka.core.KafkaTemplate; |
| 48 | +import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; |
| 49 | +import org.springframework.test.annotation.DirtiesContext; |
| 50 | +import org.springframework.test.context.ActiveProfiles; |
| 51 | +import org.springframework.test.context.DynamicPropertyRegistry; |
| 52 | +import org.springframework.test.context.DynamicPropertySource; |
| 53 | +import org.testcontainers.containers.KafkaContainer; |
| 54 | +import org.testcontainers.containers.PostgreSQLContainer; |
| 55 | +import org.testcontainers.junit.jupiter.Container; |
| 56 | +import org.testcontainers.junit.jupiter.Testcontainers; |
| 57 | + |
/**
 * Base Integration Test class. Sets up Postgres and Kafka containers, configures the related
 * properties and beans, and provides database clean-up between tests.
 */
| 62 | +@SpringBootTest |
| 63 | +@ActiveProfiles("it") |
| 64 | +@Testcontainers |
| 65 | +@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) |
| 66 | +public class BaseIT { |
| 67 | + |
| 68 | + @Container public static PostgreSQLContainer<?> postgreSQLContainer = new PostgreSQLContainer<>(); |
| 69 | + |
| 70 | + @Container public static KafkaContainer kafka = new KafkaContainer(); |
| 71 | + |
| 72 | + /** |
| 73 | + * Configure Spring Application to use postgres and kafka rolled out in containers |
| 74 | + * |
| 75 | + * @param registry |
| 76 | + */ |
| 77 | + @DynamicPropertySource |
| 78 | + static void properties(DynamicPropertyRegistry registry) { |
| 79 | + |
| 80 | + registry.add("spring.datasource.url", postgreSQLContainer::getJdbcUrl); |
| 81 | + registry.add("spring.datasource.username", postgreSQLContainer::getUsername); |
| 82 | + registry.add("spring.datasource.password", postgreSQLContainer::getPassword); |
| 83 | + registry.add("spring.jpa.hibernate.ddl-auto", () -> "none"); |
| 84 | + |
| 85 | + registry.add("feast.stream.options.bootstrapServers", kafka::getBootstrapServers); |
| 86 | + } |
| 87 | + |
| 88 | + /** |
| 89 | + * SequentialFlow is base class that is supposed to be inherited by @Nested test classes that |
| 90 | + * wants to preserve context between test cases. For SequentialFlow databases is being truncated |
| 91 | + * only once after all tests passed. |
| 92 | + */ |
| 93 | + @TestInstance(TestInstance.Lifecycle.PER_CLASS) |
| 94 | + public class SequentialFlow { |
| 95 | + @AfterAll |
| 96 | + public void tearDown() throws Exception { |
| 97 | + cleanTables(entityManager); |
| 98 | + } |
| 99 | + } |
| 100 | + |
| 101 | + /** |
| 102 | + * This class must be inherited inside IT Class and annotated with {@link |
| 103 | + * org.springframework.boot.test.context.TestConfiguration}. It provides configuration needed to |
| 104 | + * communicate with Feast via Kafka |
| 105 | + */ |
| 106 | + public static class BaseTestConfig { |
| 107 | + @Bean |
| 108 | + public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, byte[]>> |
| 109 | + testListenerContainerFactory(ConsumerFactory<String, byte[]> consumerFactory) { |
| 110 | + ConcurrentKafkaListenerContainerFactory<String, byte[]> factory = |
| 111 | + new ConcurrentKafkaListenerContainerFactory<>(); |
| 112 | + factory.setConsumerFactory(consumerFactory); |
| 113 | + return factory; |
| 114 | + } |
| 115 | + |
| 116 | + @Bean |
| 117 | + public ConsumerFactory<String, byte[]> testConsumerFactory() { |
| 118 | + Map<String, Object> props = new HashMap<>(); |
| 119 | + |
| 120 | + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()); |
| 121 | + props.put(ConsumerConfig.GROUP_ID_CONFIG, "test"); |
| 122 | + |
| 123 | + return new DefaultKafkaConsumerFactory<>( |
| 124 | + props, new StringDeserializer(), new ByteArrayDeserializer()); |
| 125 | + } |
| 126 | + |
| 127 | + @Bean |
| 128 | + public KafkaTemplate<String, IngestionJobProto.FeatureSetSpecAck> specAckKafkaTemplate( |
| 129 | + FeastProperties feastProperties) { |
| 130 | + FeastProperties.StreamProperties streamProperties = feastProperties.getStream(); |
| 131 | + Map<String, Object> props = new HashMap<>(); |
| 132 | + |
| 133 | + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()); |
| 134 | + |
| 135 | + KafkaTemplate<String, IngestionJobProto.FeatureSetSpecAck> t = |
| 136 | + new KafkaTemplate<>( |
| 137 | + new DefaultKafkaProducerFactory<>( |
| 138 | + props, new StringSerializer(), new KafkaSerialization.ProtoSerializer<>())); |
| 139 | + t.setDefaultTopic(streamProperties.getSpecsOptions().getSpecsAckTopic()); |
| 140 | + return t; |
| 141 | + } |
| 142 | + } |
| 143 | + |
| 144 | + /** |
| 145 | + * Truncates all tables in Database (between tests or flows). Retries on deadlock |
| 146 | + * |
| 147 | + * @param em EntityManager |
| 148 | + * @throws SQLException |
| 149 | + */ |
| 150 | + public static void cleanTables(EntityManager em) throws SQLException { |
| 151 | + List<String> tableNames = |
| 152 | + em.getMetamodel().getEntities().stream() |
| 153 | + .map(e -> e.getJavaType().getAnnotation(Table.class).name()) |
| 154 | + .collect(Collectors.toList()); |
| 155 | + |
| 156 | + // this trick needed to get EntityManager with Transaction |
| 157 | + // and we don't want to wrap whole class into @Transactional |
| 158 | + em = em.getEntityManagerFactory().createEntityManager(); |
| 159 | + // Transaction needed only once to do unwrap |
| 160 | + SessionImplementor session = em.unwrap(SessionImplementor.class); |
| 161 | + |
| 162 | + // and here we're actually don't want any transactions |
| 163 | + // but instead we pulling raw connection |
| 164 | + // to be able to retry query if needed |
| 165 | + // since retrying rollbacked transaction is not that easy |
| 166 | + Connection connection = session.connection(); |
| 167 | + |
| 168 | + // retries are needed since truncate require exclusive lock |
| 169 | + // and that often leads to Deadlock |
| 170 | + // since SpringApp is still running in another thread |
| 171 | + var num_retries = 5; |
| 172 | + for (var i = 1; i <= num_retries; i++) { |
| 173 | + try { |
| 174 | + Statement statement = connection.createStatement(); |
| 175 | + statement.execute(String.format("truncate %s cascade", String.join(", ", tableNames))); |
| 176 | + } catch (SQLException e) { |
| 177 | + if (i == num_retries) { |
| 178 | + throw e; |
| 179 | + } |
| 180 | + continue; |
| 181 | + } |
| 182 | + |
| 183 | + break; |
| 184 | + } |
| 185 | + } |
| 186 | + |
| 187 | + @PersistenceContext EntityManager entityManager; |
| 188 | + |
| 189 | + /** Used to determine SequentialFlows */ |
| 190 | + public Boolean isNestedTest(TestInfo testInfo) { |
| 191 | + return testInfo.getTestClass().get().getAnnotation(Nested.class) != null; |
| 192 | + } |
| 193 | + |
| 194 | + @AfterEach |
| 195 | + public void tearDown(TestInfo testInfo) throws Exception { |
| 196 | + CollectorRegistry.defaultRegistry.clear(); |
| 197 | + |
| 198 | + if (!isNestedTest(testInfo)) { |
| 199 | + cleanTables(entityManager); |
| 200 | + } |
| 201 | + } |
| 202 | +} |
0 commit comments