forked from feast-dev/feast
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path test.py
More file actions
65 lines (48 loc) · 1.8 KB
/
test.py
File metadata and controls
65 lines (48 loc) · 1.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
from datetime import datetime, timedelta
import pandas as pd
from driver_repo import driver, driver_stats_fv
from feast import FeatureStore
def main():
    """Run the Feast feature-store demo end-to-end.

    Applies the repo's entity/feature-view definitions, retrieves historical
    (offline) training data for a small entity dataframe, materializes recent
    features into the online store, then reads them back online.
    """
    # Show full dataframes when printing.
    pd.set_option("display.max_columns", None)
    pd.set_option("display.width", 1000)

    # Load the feature store from the current path
    fs = FeatureStore(repo_path=".")

    # Deploy the feature store to AWS
    print("Deploying feature store to AWS...")
    fs.apply([driver, driver_stats_fv])

    # Select features
    features = ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"]

    # Capture a single reference time so the entity dataframe's time window
    # and the materialization cutoff agree. (Previously datetime.now() was
    # re-evaluated three times, producing slightly inconsistent timestamps.)
    now = datetime.now()

    # Entity dataframe: the rows that will be enriched with historical features.
    entity_df = pd.DataFrame(
        {
            "event_timestamp": [
                pd.Timestamp(dt, unit="ms", tz="UTC").round("ms")
                for dt in pd.date_range(
                    start=now - timedelta(days=3),
                    end=now,
                    periods=3,
                )
            ],
            "driver_id": [1001, 1002, 1003],
        }
    )

    print("Retrieving training data...")
    # Retrieve historical features by joining the entity dataframe to the Redshift table source
    training_df = fs.get_historical_features(
        features=features, entity_df=entity_df
    ).to_df()
    print()
    print(training_df)
    print()

    print("Loading features into the online store...")
    # Use the same cutoff as the entity dataframe's upper bound.
    fs.materialize_incremental(end_date=now)
    print()

    print("Retrieving online features...")
    # Retrieve features from the online store (Firestore)
    online_features = fs.get_online_features(
        features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}],
    ).to_dict()
    print()
    print(pd.DataFrame.from_dict(online_features))
# Run the demo only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()