
Commit a05cdbc

fix: Trying to import pyspark lazily to avoid the dependency on the library (#4091)
1 parent: ec19036

File tree

1 file changed: +7 additions, -2 deletions
  • sdk/python/feast/infra/offline_stores/contrib/spark_offline_store

sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py

Lines changed: 7 additions & 2 deletions
@@ -4,8 +4,6 @@
 from enum import Enum
 from typing import Any, Callable, Dict, Iterable, Optional, Tuple
 
-from pyspark.sql import SparkSession
-
 from feast import flags_helper
 from feast.data_source import DataSource
 from feast.errors import DataSourceNoNameException, DataSourceNotFoundException
@@ -162,6 +160,13 @@ def get_table_column_names_and_types(
 
     def get_table_query_string(self) -> str:
         """Returns a string that can directly be used to reference this table in SQL"""
+        try:
+            from pyspark.sql import SparkSession
+        except ImportError as e:
+            from feast.errors import FeastExtrasDependencyImportError
+
+            raise FeastExtrasDependencyImportError("spark", str(e))
+
         if self.table:
             # Backticks make sure that spark sql knows this a table reference.
             table = ".".join([f"`{x}`" for x in self.table.split(".")])
