Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
package com.yourcompany.raster

import geotrellis.spark._
import geotrellis.spark.io.hadoop._
import org.apache.hadoop.fs.Path

// NOTE(review): this is worksheet/spark-shell style code — `sc` (the
// SparkContext) is assumed to be predefined by the shell. A `package` clause
// plus top-level vals will not compile as a plain .scala file; wrap the body
// in an object with a main method for a standalone build.

// Zoom level used for every layer read and write below.
val zoomLevel = 14

// Catalog backed by HDFS; all layers below are loaded from / saved to this path.
val hc = HadoopCatalog(sc, new Path("hdfs://localhost.localdomain/user/cloudera/catalog"))

// Load the two source rasters at the working zoom level. The numeric IDs
// identify the source layers in the catalog, so they are kept in the val
// names for traceability.
val ras594321_14 = hc.load[SpatialKey](LayerId("594321_GSY", zoomLevel))
val ras670385_14 = hc.load[SpatialKey](LayerId("670385_GSY", zoomLevel))

// Cell-wise sum of the two rasters, scaled down by 80.
// NOTE(review): 80 is a magic constant — presumably a normalization factor;
// confirm its meaning with the data owner and consider naming it.
val result = (ras670385_14 + ras594321_14) / 80

// Persist the derived layer under its own id.
val resultId = LayerId("result_raster", zoomLevel)
hc.save[SpatialKey](resultId, result)

// Load it back out.
val reloadedResult = hc.load[SpatialKey](resultId)

// Check that the saved result and the reloaded result are cell-for-cell equal.
result.combinePairs(reloadedResult) { case ((_, originalTile), (_, reloadedTile)) =>
  val (cols1, rows1) = originalTile.dimensions
  val (cols2, rows2) = reloadedTile.dimensions
  // BUG FIX: the original asserted cols1 == rows1 and cols2 == rows2, i.e.
  // that each tile is SQUARE — which says nothing about the round trip.
  // The round-trip check needs the two tiles to have MATCHING dimensions.
  assert(cols1 == cols2)
  assert(rows1 == rows2)
  for (row <- 0 until rows1; col <- 0 until cols1) {
    // NOTE(review): `==` on Double is false for NaN; if this layer uses NaN
    // as NoData, this assertion will fail on NoData cells even after a
    // faithful round trip — confirm the layer's cell type / NoData value.
    assert(originalTile.getDouble(col, row) == reloadedTile.getDouble(col, row))
  }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement