Skip to content

Commit f4fbbc0

Browse files
Author: Chen Zhiling
Commit message: DataflowJobManager updates existing job instance (#678)
1 parent 4db87f3 commit f4fbbc0

File tree

1 file changed: +22 additions, -20 deletions

core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java

Lines changed: 22 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,6 @@
4747
import java.util.Collections;
4848
import java.util.List;
4949
import java.util.Map;
50-
import java.util.stream.Collectors;
5150
import lombok.extern.slf4j.Slf4j;
5251
import org.apache.beam.runners.dataflow.DataflowPipelineJob;
5352
import org.apache.beam.runners.dataflow.DataflowRunner;
@@ -120,12 +119,15 @@ public Job startJob(Job job) {
120119
for (FeatureSet featureSet : job.getFeatureSets()) {
121120
featureSetProtos.add(featureSet.toProto());
122121
}
123-
return submitDataflowJob(
124-
job.getId(),
125-
featureSetProtos,
126-
job.getSource().toProto(),
127-
job.getStore().toProto(),
128-
false);
122+
String extId =
123+
submitDataflowJob(
124+
job.getId(),
125+
featureSetProtos,
126+
job.getSource().toProto(),
127+
job.getStore().toProto(),
128+
false);
129+
job.setExtId(extId);
130+
return job;
129131

130132
} catch (InvalidProtocolBufferException e) {
131133
log.error(e.getMessage());
@@ -150,8 +152,17 @@ public Job updateJob(Job job) {
150152
for (FeatureSet featureSet : job.getFeatureSets()) {
151153
featureSetProtos.add(featureSet.toProto());
152154
}
153-
return submitDataflowJob(
154-
job.getId(), featureSetProtos, job.getSource().toProto(), job.getStore().toProto(), true);
155+
156+
String extId =
157+
submitDataflowJob(
158+
job.getId(),
159+
featureSetProtos,
160+
job.getSource().toProto(),
161+
job.getStore().toProto(),
162+
true);
163+
164+
job.setExtId(extId);
165+
return job;
155166
} catch (InvalidProtocolBufferException e) {
156167
log.error(e.getMessage());
157168
throw new IllegalArgumentException(
@@ -236,7 +247,7 @@ public JobStatus getJobStatus(Job job) {
236247
return JobStatus.UNKNOWN;
237248
}
238249

239-
private Job submitDataflowJob(
250+
private String submitDataflowJob(
240251
String jobName,
241252
List<FeatureSetProto.FeatureSet> featureSetProtos,
242253
SourceProto.Source source,
@@ -245,17 +256,8 @@ private Job submitDataflowJob(
245256
try {
246257
ImportOptions pipelineOptions = getPipelineOptions(jobName, featureSetProtos, sink, update);
247258
DataflowPipelineJob pipelineResult = runPipeline(pipelineOptions);
248-
List<FeatureSet> featureSets =
249-
featureSetProtos.stream().map(FeatureSet::fromProto).collect(Collectors.toList());
250259
String jobId = waitForJobToRun(pipelineResult);
251-
return new Job(
252-
jobName,
253-
jobId,
254-
getRunnerType(),
255-
Source.fromProto(source),
256-
Store.fromProto(sink),
257-
featureSets,
258-
JobStatus.PENDING);
260+
return jobId;
259261
} catch (Exception e) {
260262
log.error("Error submitting job", e);
261263
throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e);

0 commit comments

Comments (0)