Skip to content

Commit 601033f

Browse files
cheszhilingc
authored and committed
Fail Spotless formatting check before tests execute (#487)
* Fail formatting check before tests execute

By default, the spotless Maven plugin binds its check goal to the verify phase (late in the lifecycle, after integration tests). Because we currently only run `mvn test` for CI, it doesn't proceed as far as verify, so missed formatting is not caught by CI.

This binds the check to an earlier phase, between test-compile and test, so that it will fail before `mvn test` but not disrupt your dev workflow of compiling main and test sources as you work. This strikes a good compromise on failing fast for code standards without being _too_ nagging.

For the complete lifecycle reference, see: https://maven.apache.org/guides/introduction/introduction-to-the-lifecycle.html

* Apply spotless formatting

(cherry picked from commit 6363540)
1 parent 5205d10 commit 601033f

File tree

3 files changed

+18
-13
lines changed

3 files changed

+18
-13
lines changed

ingestion/src/test/java/feast/ingestion/ImportJobTest.java

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -32,14 +32,12 @@
3232
import feast.core.StoreProto.Store.Subscription;
3333
import feast.ingestion.options.BZip2Compressor;
3434
import feast.ingestion.options.ImportOptions;
35-
import feast.ingestion.options.OptionByteConverter;
3635
import feast.storage.RedisProto.RedisKey;
3736
import feast.test.TestUtil;
3837
import feast.test.TestUtil.LocalKafka;
3938
import feast.test.TestUtil.LocalRedis;
4039
import feast.types.FeatureRowProto.FeatureRow;
4140
import feast.types.ValueProto.ValueType.Enum;
42-
import java.io.ByteArrayOutputStream;
4341
import java.io.IOException;
4442
import java.nio.charset.StandardCharsets;
4543
import java.util.ArrayList;
@@ -51,7 +49,6 @@
5149
import org.apache.beam.sdk.PipelineResult;
5250
import org.apache.beam.sdk.PipelineResult.State;
5351
import org.apache.beam.sdk.options.PipelineOptionsFactory;
54-
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
5552
import org.apache.kafka.common.serialization.ByteArraySerializer;
5653
import org.joda.time.Duration;
5754
import org.junit.AfterClass;
@@ -166,11 +163,13 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow()
166163
.build();
167164

168165
ImportOptions options = PipelineOptionsFactory.create().as(ImportOptions.class);
169-
BZip2Compressor<FeatureSetSpec> compressor = new BZip2Compressor<>(option -> {
170-
JsonFormat.Printer printer =
171-
JsonFormat.printer().omittingInsignificantWhitespace().printingEnumsAsInts();
172-
return printer.print(option).getBytes();
173-
});
166+
BZip2Compressor<FeatureSetSpec> compressor =
167+
new BZip2Compressor<>(
168+
option -> {
169+
JsonFormat.Printer printer =
170+
JsonFormat.printer().omittingInsignificantWhitespace().printingEnumsAsInts();
171+
return printer.print(option).getBytes();
172+
});
174173
options.setFeatureSetJson(compressor.compress(spec));
175174
options.setStoreJson(Collections.singletonList(JsonFormat.printer().print(redis)));
176175
options.setProject("");

pom.xml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -372,6 +372,16 @@
372372
<removeUnusedImports />
373373
</java>
374374
</configuration>
375+
<executions>
376+
<!-- Move check to fail faster, but after compilation. Default is verify phase -->
377+
<execution>
378+
<id>spotless-check</id>
379+
<phase>process-test-classes</phase>
380+
<goals>
381+
<goal>check</goal>
382+
</goals>
383+
</execution>
384+
</executions>
375385
</plugin>
376386
<plugin>
377387
<groupId>org.apache.maven.plugins</groupId>

serving/src/main/java/feast/serving/specs/CachedSpecService.java

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -195,11 +195,7 @@ private Map<String, String> getFeatureToFeatureSetMapping(
195195
HashMap<String, String> mapping = new HashMap<>();
196196

197197
featureSets.values().stream()
198-
.collect(
199-
groupingBy(
200-
featureSet ->
201-
Pair.of(
202-
featureSet.getProject(), featureSet.getName())))
198+
.collect(groupingBy(featureSet -> Pair.of(featureSet.getProject(), featureSet.getName())))
203199
.forEach(
204200
(group, groupedFeatureSets) -> {
205201
groupedFeatureSets =

0 commit comments

Comments (0)