StandardEncoders.scala
@@ -33,6 +33,8 @@ import org.locationtech.geomesa.spark.jts.encoders.SpatialEncoders
import org.locationtech.rasterframes.model.{CellContext, LongExtent, TileContext, TileDataContext}
import frameless.TypedEncoder
import geotrellis.raster.mapalgebra.focal.{Kernel, Neighborhood, TargetCell}
import org.locationtech.rasterframes.ref.RFRasterSource
import org.locationtech.rasterframes.tiles.ProjectedRasterTile

import java.net.URI
import java.sql.Timestamp
@@ -45,14 +47,14 @@ trait StandardEncoders extends SpatialEncoders with TypedEncoders {
implicit def optionalEncoder[T: TypedEncoder]: ExpressionEncoder[Option[T]] = typedExpressionEncoder[Option[T]]

implicit lazy val strMapEncoder: ExpressionEncoder[Map[String, String]] = ExpressionEncoder()
implicit lazy val crsExpressionEncoder: ExpressionEncoder[CRS] = ExpressionEncoder()
implicit lazy val projectedExtentEncoder: ExpressionEncoder[ProjectedExtent] = ExpressionEncoder()
implicit lazy val temporalProjectedExtentEncoder: ExpressionEncoder[TemporalProjectedExtent] = ExpressionEncoder()
implicit lazy val timestampEncoder: ExpressionEncoder[Timestamp] = ExpressionEncoder()
implicit lazy val cellStatsEncoder: ExpressionEncoder[CellStatistics] = ExpressionEncoder()
implicit lazy val cellHistEncoder: ExpressionEncoder[CellHistogram] = ExpressionEncoder()
implicit lazy val localCellStatsEncoder: ExpressionEncoder[LocalCellStatistics] = ExpressionEncoder()

implicit lazy val crsExpressionEncoder: ExpressionEncoder[CRS] = typedExpressionEncoder
implicit lazy val uriEncoder: ExpressionEncoder[URI] = typedExpressionEncoder[URI]
implicit lazy val neighborhoodEncoder: ExpressionEncoder[Neighborhood] = typedExpressionEncoder[Neighborhood]
implicit lazy val targetCellEncoder: ExpressionEncoder[TargetCell] = typedExpressionEncoder[TargetCell]
@@ -78,6 +80,11 @@ trait StandardEncoders extends SpatialEncoders with TypedEncoders {

implicit lazy val tileEncoder: ExpressionEncoder[Tile] = typedExpressionEncoder
implicit def rasterEncoder[T <: CellGrid[Int]: TypedEncoder]: ExpressionEncoder[Raster[T]] = typedExpressionEncoder[Raster[T]]

// Intentionally not implicit, defined as implicit in the ProjectedRasterTile companion object
lazy val projectedRasterTileEncoder: ExpressionEncoder[ProjectedRasterTile] = typedExpressionEncoder
// Intentionally not implicit, defined as implicit in the RFRasterSource companion object
lazy val rfRasterSourceEncoder: ExpressionEncoder[RFRasterSource] = typedExpressionEncoder
}

object StandardEncoders extends StandardEncoders
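The two non-implicit definitions above lean on Scala's companion-object implicit scope: an encoder declared implicit only in the encoded type's companion is found wherever an `Encoder` for that type is required, without creating ambiguous implicits inside `StandardEncoders`. A minimal sketch of the pattern, using a hypothetical `Payload` type that is not part of RasterFrames:

```scala
import org.apache.spark.sql.{Encoder, Encoders}

// Hypothetical type, used only to illustrate the companion-object pattern.
case class Payload(name: String, value: Int)

object Payload {
  // The implicit lives in the companion object, so it is in implicit scope for
  // Payload everywhere, with no import and no competing implicit elsewhere.
  implicit lazy val payloadEncoder: Encoder[Payload] = Encoders.product[Payload]
}

object PayloadEncoderDemo {
  // Resolves payloadEncoder from the companion object.
  def summoned: Encoder[Payload] = implicitly[Encoder[Payload]]
}
```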
TypedEncoders.scala
@@ -10,6 +10,8 @@ import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.util.QuantileSummaries
import org.apache.spark.sql.rf.{CrsUDT, RasterSourceUDT, TileUDT}
import org.locationtech.jts.geom.Envelope
import org.locationtech.rasterframes.ref.RFRasterSource
import org.locationtech.rasterframes.tiles.ProjectedRasterTile
import org.locationtech.rasterframes.util.{FocalNeighborhood, FocalTargetCell, KryoSupport}

import java.net.URI
@@ -23,6 +25,8 @@ trait TypedEncoders {
implicit val tileUDT = new TileUDT
implicit val rasterSourceUDT = new RasterSourceUDT

implicit val crsTypedEncoder: TypedEncoder[CRS] = TypedEncoder.usingUserDefinedType[CRS]

implicit val cellTypeInjection: Injection[CellType, String] = Injection(_.toString, CellType.fromName)
implicit val cellTypeTypedEncoder: TypedEncoder[CellType] = TypedEncoder.usingInjection[CellType, String]

@@ -89,7 +93,20 @@ trait TypedEncoders {
implicit val tileTypedEncoder: TypedEncoder[Tile] = TypedEncoder.usingUserDefinedType[Tile]
implicit def rasterTileTypedEncoder[T <: CellGrid[Int]: TypedEncoder]: TypedEncoder[Raster[T]] = TypedEncoder.usingDerivation

// Derivation is done through frameless to trigger RasterSourceUDT load
implicit val rfRasterSourceTypedEncoder: TypedEncoder[RFRasterSource] = TypedEncoder.usingUserDefinedType[RFRasterSource]

implicit val kernelTypedEncoder: TypedEncoder[Kernel] = TypedEncoder.usingDerivation

// Derivation is done through frameless to trigger the TileUDT and CrsUDT load
implicit val projectedRasterTileTypedEncoder: TypedEncoder[ProjectedRasterTile] =
ManualTypedEncoder.newInstance[ProjectedRasterTile](
fields = List(
RecordEncoderField(0, "tile", TypedEncoder[Tile]),
RecordEncoderField(1, "extent", TypedEncoder[Extent]),
RecordEncoderField(2, "crs", TypedEncoder[CRS])
)
)
}

object TypedEncoders extends TypedEncoders
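For context, the `Injection`-based `TypedEncoder` used above for `CellType` is a general frameless pattern for any type with a lossless string form. A sketch under that assumption, with a hypothetical `Temperature` type:

```scala
import frameless.{Injection, TypedEncoder}

// Hypothetical value type with an unambiguous string representation.
final case class Temperature(celsius: Double)

object TemperatureEncoding {
  // Store Temperature as a String column and recover it by parsing.
  implicit val temperatureInjection: Injection[Temperature, String] =
    Injection(_.celsius.toString, s => Temperature(s.toDouble))

  // Frameless combines the Injection with its built-in String encoder.
  implicit val temperatureTypedEncoder: TypedEncoder[Temperature] =
    TypedEncoder.usingInjection[Temperature, String]
}
```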
RFRasterSource.scala
@@ -31,7 +31,7 @@ import geotrellis.vector.Extent
import org.apache.hadoop.conf.Configuration
import org.apache.spark.annotation.Experimental
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.rf.RasterSourceUDT
import org.locationtech.rasterframes.encoders.StandardEncoders
import org.locationtech.rasterframes.model.TileContext
import org.locationtech.rasterframes.{NOMINAL_TILE_DIMS, rfConfig}

@@ -100,10 +100,7 @@ object RFRasterSource extends LazyLogging {

def cacheStats = rsCache.stats()

implicit def rsEncoder: ExpressionEncoder[RFRasterSource] = {
RasterSourceUDT // Makes sure UDT is registered first
ExpressionEncoder()
}
implicit lazy val rsEncoder: ExpressionEncoder[RFRasterSource] = StandardEncoders.rfRasterSourceEncoder

def apply(source: URI): RFRasterSource =
rsCache.get(
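With the encoder delegated to `StandardEncoders`, building a `Dataset[RFRasterSource]` works as before; the implicit is still resolved from the `RFRasterSource` companion. A usage sketch, assuming an active `SparkSession` with RasterFrames initialized and a placeholder file URI:

```scala
import java.net.URI
import org.apache.spark.sql.{Dataset, SparkSession}
import org.locationtech.rasterframes.ref.RFRasterSource

object RasterSourceDatasetSketch {
  def build(spark: SparkSession): Dataset[RFRasterSource] = {
    // Placeholder URI; any readable GeoTIFF location would do.
    val rs = RFRasterSource(URI.create("file:///tmp/scene.tif"))
    // The Encoder[RFRasterSource] is the companion's rsEncoder, which now
    // points at StandardEncoders.rfRasterSourceEncoder.
    spark.createDataset(Seq(rs))
  }
}
```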
ProjectedRasterTile.scala
@@ -27,6 +27,7 @@ import geotrellis.vector.{Extent, ProjectedExtent}
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.locationtech.rasterframes.ref.ProjectedRasterLike
import org.apache.spark.sql.catalyst.DefinedByConstructorParams
import org.locationtech.rasterframes.encoders.StandardEncoders

/**
* A Tile that's also like a ProjectedRaster, with delayed evaluation support.
@@ -58,5 +59,6 @@ object ProjectedRasterTile {

def unapply(prt: ProjectedRasterTile): Option[(Tile, Extent, CRS)] = Some((prt.tile, prt.extent, prt.crs))

implicit lazy val projectedRasterTileEncoder: ExpressionEncoder[ProjectedRasterTile] = ExpressionEncoder()
implicit lazy val projectedRasterTileEncoder: ExpressionEncoder[ProjectedRasterTile] =
StandardEncoders.projectedRasterTileEncoder
}
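Since the companion-object implicit now reuses the single `StandardEncoders` definition, a `Dataset[ProjectedRasterTile]` needs no extra encoder glue at the call site. A hedged sketch (tile, extent, and CRS values are illustrative; an active `SparkSession` is assumed):

```scala
import geotrellis.proj4.LatLng
import geotrellis.raster.ArrayTile
import geotrellis.vector.Extent
import org.apache.spark.sql.{Dataset, SparkSession}
import org.locationtech.rasterframes.tiles.ProjectedRasterTile

object ProjectedRasterTileDatasetSketch {
  def build(spark: SparkSession): Dataset[ProjectedRasterTile] = {
    import spark.implicits._
    // A 2x2 integer tile with illustrative values.
    val tile = ArrayTile(Array(1, 2, 3, 4), 2, 2)
    val prt  = ProjectedRasterTile(tile, Extent(0.0, 0.0, 1.0, 1.0), LatLng)
    // Encoder[ProjectedRasterTile] resolves from the companion object above.
    Seq(prt).toDS()
  }
}
```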
SFCIndexerSpec.scala
@@ -24,7 +24,6 @@ package org.locationtech.rasterframes.expressions
import geotrellis.proj4.{CRS, LatLng, WebMercator}
import geotrellis.raster.CellType
import geotrellis.vector._
import org.apache.spark.sql.Encoders
import org.apache.spark.sql.jts.JTSTypes
import org.locationtech.geomesa.curve.{XZ2SFC, Z2SFC}
import org.locationtech.rasterframes._
@@ -151,7 +150,6 @@ class SFCIndexerSpec extends TestEnvironment with Inspectors {
val tile = TestData.randomTile(2, 2, CellType.fromName("uint8"))
val prts = testExtents.map(reproject(crs)).map(ProjectedRasterTile(tile, _, crs))

implicit val enc = Encoders.tuple(ProjectedRasterTile.projectedRasterTileEncoder, Encoders.scalaInt)
// The `id` here is to deal with Spark auto projecting single columns dataframes and needing to provide an encoder
val df = prts.zipWithIndex.toDF("proj_raster", "id")
withClue("XZ2") {
4 changes: 4 additions & 0 deletions docs/src/main/paradox/release-notes.md
@@ -2,6 +2,10 @@

## 0.10.x

### 0.10.1

* Fix UDTs registration ordering [#573](https://github.com/locationtech/rasterframes/pull/573)

### 0.10.0

* Upgraded to Scala 2.12, Spark 3.1.2, and GeoTrellis 3.6.0 (a substantial accomplishment!)
2 changes: 1 addition & 1 deletion pyrasterframes/src/main/python/setup.py
@@ -140,7 +140,7 @@ def dest_file(self, src_file):
# to throw a `NotImplementedError: Can't perform this operation for unregistered loader type`
pytest = 'pytest>=4.0.0,<5.0.0'

pyspark = 'pyspark==3.1.1'
pyspark = 'pyspark==3.1.2'
Review comment (Member Author): Don't know how it worked before 🤦

boto3 = 'boto3'
deprecation = 'deprecation'
descartes = 'descartes'
2 changes: 1 addition & 1 deletion rf-notebook/src/main/docker/requirements-nb.txt
@@ -1,4 +1,4 @@
pyspark>=3.1
pyspark==3.1.2
Review comment (Member Author): Spark 3.2 has been released and we need stricter constraints here.

gdal==3.1.2
numpy
pandas