|
16 | 16 | */ |
17 | 17 | package feast.core.service; |
18 | 18 |
|
| 19 | +import static feast.core.model.FeatureSet.parseReference; |
| 20 | + |
19 | 21 | import com.google.protobuf.InvalidProtocolBufferException; |
20 | 22 | import feast.core.config.FeastProperties; |
21 | 23 | import feast.core.config.FeastProperties.JobProperties; |
|
26 | 28 | import feast.core.model.*; |
27 | 29 | import feast.proto.core.CoreServiceProto.ListStoresRequest.Filter; |
28 | 30 | import feast.proto.core.CoreServiceProto.ListStoresResponse; |
| 31 | +import feast.proto.core.FeatureSetProto; |
| 32 | +import feast.proto.core.IngestionJobProto; |
29 | 33 | import feast.proto.core.StoreProto; |
30 | 34 | import feast.proto.core.StoreProto.Store.Subscription; |
31 | 35 | import java.util.ArrayList; |
32 | 36 | import java.util.HashSet; |
33 | 37 | import java.util.List; |
34 | 38 | import java.util.Optional; |
35 | 39 | import java.util.Set; |
36 | | -import java.util.concurrent.ExecutionException; |
37 | | -import java.util.concurrent.ExecutorCompletionService; |
38 | | -import java.util.concurrent.ExecutorService; |
39 | | -import java.util.concurrent.Executors; |
| 40 | +import java.util.concurrent.*; |
40 | 41 | import java.util.stream.Collectors; |
41 | 42 | import javax.validation.constraints.Positive; |
42 | 43 | import lombok.extern.slf4j.Slf4j; |
| 44 | +import org.apache.kafka.clients.consumer.ConsumerRecord; |
43 | 45 | import org.springframework.beans.factory.annotation.Autowired; |
| 46 | +import org.springframework.data.util.Pair; |
| 47 | +import org.springframework.kafka.annotation.KafkaListener; |
| 48 | +import org.springframework.kafka.core.KafkaTemplate; |
44 | 49 | import org.springframework.scheduling.annotation.Scheduled; |
45 | 50 | import org.springframework.stereotype.Service; |
46 | 51 | import org.springframework.transaction.annotation.Transactional; |
|
49 | 54 | @Service |
50 | 55 | public class JobCoordinatorService { |
51 | 56 |
|
| 57 | + private final int SPEC_PUBLISHING_TIMEOUT_SECONDS = 5; |
| 58 | + |
52 | 59 | private final JobRepository jobRepository; |
53 | 60 | private final FeatureSetRepository featureSetRepository; |
54 | 61 | private final SpecService specService; |
55 | 62 | private final JobManager jobManager; |
56 | 63 | private final JobProperties jobProperties; |
| 64 | + private final KafkaTemplate<String, FeatureSetProto.FeatureSetSpec> specPublisher; |
57 | 65 |
|
  /**
   * Constructs a JobCoordinatorService with its collaborators injected by Spring.
   *
   * @param jobRepository repository for ingestion {@code Job} entities
   * @param featureSetRepository repository for {@code FeatureSet} entities
   * @param specService service used to look up stores and feature set specs
   * @param jobManager manager that starts/updates/aborts ingestion jobs
   * @param feastProperties application configuration; only {@code getJobs()} is retained
   * @param specPublisher Kafka template used to publish FeatureSetSpec updates to running jobs
   */
  @Autowired
  public JobCoordinatorService(
      JobRepository jobRepository,
      FeatureSetRepository featureSetRepository,
      SpecService specService,
      JobManager jobManager,
      FeastProperties feastProperties,
      KafkaTemplate<String, FeatureSetProto.FeatureSetSpec> specPublisher) {
    this.jobRepository = jobRepository;
    this.featureSetRepository = featureSetRepository;
    this.specService = specService;
    this.jobManager = jobManager;
    this.jobProperties = feastProperties.getJobs();
    this.specPublisher = specPublisher;
  }
71 | 81 |
|
72 | 82 | /** |
@@ -153,4 +163,124 @@ public Optional<Job> getJob(Source source, Store store) { |
153 | 163 | // return the latest |
154 | 164 | return Optional.of(jobs.get(0)); |
155 | 165 | } |
| 166 | + |
| 167 | + @Transactional |
| 168 | + @Scheduled(fixedDelayString = "${feast.stream.specsOptions.notifyIntervalMilliseconds}") |
| 169 | + public void notifyJobsWhenFeatureSetUpdated() { |
| 170 | + List<FeatureSet> pendingFeatureSets = |
| 171 | + featureSetRepository.findAllByStatus(FeatureSetProto.FeatureSetStatus.STATUS_PENDING); |
| 172 | + |
| 173 | + pendingFeatureSets.stream() |
| 174 | + .filter( |
| 175 | + fs -> { |
| 176 | + List<FeatureSetJobStatus> runningJobs = |
| 177 | + fs.getJobStatuses().stream() |
| 178 | + .filter(jobStatus -> jobStatus.getJob().isRunning()) |
| 179 | + .collect(Collectors.toList()); |
| 180 | + |
| 181 | + return runningJobs.size() > 0 |
| 182 | + && runningJobs.stream() |
| 183 | + .allMatch(jobStatus -> jobStatus.getVersion() < fs.getVersion()); |
| 184 | + }) |
| 185 | + .forEach( |
| 186 | + fs -> { |
| 187 | + log.info("Sending new FeatureSet {} to Ingestion", fs.getReference()); |
| 188 | + |
| 189 | + // Sending latest version of FeatureSet to all currently running IngestionJobs |
| 190 | + // (there's one topic for all sets). |
| 191 | + // All related jobs would apply new FeatureSet on the fly. |
| 192 | + // In case kafka doesn't respond within SPEC_PUBLISHING_TIMEOUT_SECONDS we will try |
| 193 | + // again later. |
| 194 | + try { |
| 195 | + specPublisher |
| 196 | + .sendDefault(fs.getReference(), fs.toProto().getSpec()) |
| 197 | + .get(SPEC_PUBLISHING_TIMEOUT_SECONDS, TimeUnit.SECONDS); |
| 198 | + } catch (Exception e) { |
| 199 | + log.error( |
| 200 | + "Error occurred while sending FeatureSetSpec to kafka. Cause {}." |
| 201 | + + " Will retry later", |
| 202 | + e.getMessage()); |
| 203 | + return; |
| 204 | + } |
| 205 | + |
| 206 | + // Updating delivery status for related jobs (that are currently using this |
| 207 | + // FeatureSet). |
| 208 | + // We now set status to IN_PROGRESS, so listenAckFromJobs would be able to |
| 209 | + // monitor delivery progress for each new version. |
| 210 | + fs.getJobStatuses().stream() |
| 211 | + .filter(s -> s.getJob().isRunning()) |
| 212 | + .forEach( |
| 213 | + jobStatus -> { |
| 214 | + jobStatus.setDeliveryStatus( |
| 215 | + FeatureSetProto.FeatureSetJobDeliveryStatus.STATUS_IN_PROGRESS); |
| 216 | + jobStatus.setVersion(fs.getVersion()); |
| 217 | + }); |
| 218 | + featureSetRepository.saveAndFlush(fs); |
| 219 | + }); |
| 220 | + } |
| 221 | + |
| 222 | + /** |
| 223 | + * Listener for ACK messages coming from IngestionJob when FeatureSetSpec is installed (in |
| 224 | + * pipeline). |
| 225 | + * |
| 226 | + * <p>Updates FeatureSetJobStatus for respected FeatureSet (selected by reference) and Job (select |
| 227 | + * by Id). |
| 228 | + * |
| 229 | + * <p>When all related (running) to FeatureSet jobs are updated - FeatureSet receives READY status |
| 230 | + * |
| 231 | + * @param record ConsumerRecord with key: FeatureSet reference and value: Ack message |
| 232 | + */ |
| 233 | + @KafkaListener(topics = {"${feast.stream.specsOptions.specsAckTopic}"}) |
| 234 | + @Transactional |
| 235 | + public void listenAckFromJobs( |
| 236 | + ConsumerRecord<String, IngestionJobProto.FeatureSetSpecAck> record) { |
| 237 | + String setReference = record.key(); |
| 238 | + Pair<String, String> projectAndSetName = parseReference(setReference); |
| 239 | + FeatureSet featureSet = |
| 240 | + featureSetRepository.findFeatureSetByNameAndProject_Name( |
| 241 | + projectAndSetName.getSecond(), projectAndSetName.getFirst()); |
| 242 | + if (featureSet == null) { |
| 243 | + log.warn( |
| 244 | + String.format("ACKListener received message for unknown FeatureSet %s", setReference)); |
| 245 | + return; |
| 246 | + } |
| 247 | + |
| 248 | + int ackVersion = record.value().getFeatureSetVersion(); |
| 249 | + |
| 250 | + if (featureSet.getVersion() != ackVersion) { |
| 251 | + log.warn( |
| 252 | + String.format( |
| 253 | + "ACKListener received outdated ack for %s. Current %d, Received %d", |
| 254 | + setReference, featureSet.getVersion(), ackVersion)); |
| 255 | + return; |
| 256 | + } |
| 257 | + |
| 258 | + log.info("Updating featureSet {} delivery statuses.", featureSet.getReference()); |
| 259 | + |
| 260 | + featureSet.getJobStatuses().stream() |
| 261 | + .filter( |
| 262 | + js -> |
| 263 | + js.getJob().getId().equals(record.value().getJobName()) |
| 264 | + && js.getVersion() == ackVersion) |
| 265 | + .findFirst() |
| 266 | + .ifPresent( |
| 267 | + featureSetJobStatus -> |
| 268 | + featureSetJobStatus.setDeliveryStatus( |
| 269 | + FeatureSetProto.FeatureSetJobDeliveryStatus.STATUS_DELIVERED)); |
| 270 | + |
| 271 | + boolean allDelivered = |
| 272 | + featureSet.getJobStatuses().stream() |
| 273 | + .filter(js -> js.getJob().isRunning()) |
| 274 | + .allMatch( |
| 275 | + js -> |
| 276 | + js.getDeliveryStatus() |
| 277 | + .equals(FeatureSetProto.FeatureSetJobDeliveryStatus.STATUS_DELIVERED)); |
| 278 | + |
| 279 | + if (allDelivered) { |
| 280 | + log.info("FeatureSet {} update is completely delivered", featureSet.getReference()); |
| 281 | + |
| 282 | + featureSet.setStatus(FeatureSetProto.FeatureSetStatus.STATUS_READY); |
| 283 | + featureSetRepository.saveAndFlush(featureSet); |
| 284 | + } |
| 285 | + } |
156 | 286 | } |
0 commit comments