20 Nov

Get angle/orientation/direction of GPS track using R

Hey,

Today, a bit of trSIGonometry with this function that computes the angle/orientation/direction between two consecutive points of a GPX file.

############################
#Function
############################
 
angle <- function(pt1, pt2) {
  # Returns the bearing from pt1 to pt2, in degrees clockwise from North (0-360).
  # pt1 and pt2 are c(x, y) coordinate vectors; coordinates are treated as planar.
  conv <- 180 / pi # conversion radians -> degrees
  diff <- pt2 - pt1
  diff.abs <- abs(diff)
  if (diff[1] >= 0 & diff[2] >= 0) {        # north-east quadrant
    angle <- pi/2 - atan(diff.abs[2] / diff.abs[1])
  } else if (diff[1] >= 0 & diff[2] <= 0) { # south-east quadrant
    angle <- pi/2 + atan(diff.abs[2] / diff.abs[1])
  } else if (diff[1] <= 0 & diff[2] >= 0) { # north-west quadrant
    angle <- 3*pi/2 + atan(diff.abs[2] / diff.abs[1])
  } else {                                  # south-west quadrant
    angle <- pi + (pi/2 - atan(diff.abs[2] / diff.abs[1]))
  }
  return(angle * conv)
}
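 
A quick sanity check with made-up coordinates (the function takes pt1 and pt2 as c(x, y) vectors and returns the bearing in degrees, measured clockwise from North):
 
# Quick check with dummy points
angle(c(0, 0), c(1, 1))  # 45  (north-east)
angle(c(0, 0), c(0, -1)) # 180 (south)
angle(c(0, 0), c(-1, 0)) # 270 (west)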
 
############################
#Script
############################
 
library(rgdal)
library(raster)
library(data.table)
 
#Set folder
setwd("/home/GuruBlard/gpx/")
 
# Load file names
filenames <- list.files(pattern=".gpx$")
 
# Iteration through all gpx files
for (filename in filenames){
  # Load the GPX track points
  # ogrListLayers(filename) # uncomment to check the available layers
  gpx.trackpoints <- readOGR(dsn = filename, layer = "track_points", stringsAsFactors = FALSE)
 
  # Extract the coordinates as a matrix:
  gpx.tp <- coordinates(gpx.trackpoints)
 
  # Loop through the track points and compute the angle (bearing) from each point to the following one
  # (the last point has no following point and keeps NA):
  gpx.trackpoints$angle <- NA
  for (tp in 1:(nrow(gpx.tp)-1)) {
    gpx.trackpoints$angle[tp] <- angle(gpx.tp[tp,], gpx.tp[tp+1,])
  }
 
  gpx.trackpoints@data$Date <- substr(as.character(gpx.trackpoints@data$time), 1,10)
  gpx.trackpoints@data$Hour <- substr(as.character(gpx.trackpoints@data$time),12,19)
  gpx.trackpoints@data$filename <- filename
  writeOGR(gpx.trackpoints, paste0(tools::file_path_sans_ext(filename), ".shp"), layer=tools::file_path_sans_ext(filename), driver="ESRI Shapefile")
}

[Screenshot of the result]

17 Jul

Fast Euclidean Distance Function

Holà GISpo,

Today, a simple function to greatly speed up the standard Euclidean distance computation in R when you work with a large matrix (n > 100,000,000 elements).

euc_dist <- function(m) {mtm <- Matrix::tcrossprod(m); sq <- rowSums(m*m);  sqrt(outer(sq,sq,"+") - 2*mtm)} 
 
# Example
M <- matrix(rexp(1000000, rate=.1), ncol=1000)
 
ptm <- proc.time()
d <- dist(M, method = "euclidean")
proc.time() - ptm
 
#   user  system elapsed 
#   1.08    0.00    1.08 
 
ptm <- proc.time()
d2 <- euc_dist(M)
d2 <- as.dist(d2)
proc.time() - ptm
 
#   user  system elapsed 
#  0.424   0.008   0.429 
 
isTRUE(all.equal(as.matrix(d), as.matrix(d2)))
#TRUE
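 
For the record, the speed-up comes from the identity ||a - b||^2 = ||a||^2 + ||b||^2 - 2*(a.b): tcrossprod(m) computes all the pairwise dot products at once, rowSums(m*m) the squared norms, and outer() adds them pairwise. One caveat: floating-point rounding can make the term under the square root very slightly negative (typically on the diagonal), which yields NaN. A minimal guarded variant, same logic plus a pmax() safeguard:
 
euc_dist_safe <- function(m) {
  mtm <- Matrix::tcrossprod(m)                 # all pairwise dot products
  sq  <- rowSums(m * m)                        # squared row norms
  sqrt(pmax(outer(sq, sq, "+") - 2 * mtm, 0))  # clamp tiny negatives before sqrt
}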
12 May

Download daymet data with R

Hey folks! It's been a while!

Interested in meteorological data over the US? Here is a way to easily download Daymet data, inspired by the DaymetR package.

Daymet is a collection of algorithms and computer software designed to interpolate and extrapolate from daily meteorological observations to produce gridded estimates of daily weather parameters. Weather parameters generated include daily surfaces of minimum and maximum temperature, precipitation, humidity, and radiation produced on a 1 km x 1 km gridded surface over the conterminous United States, Mexico, and Southern Canada.

Download the “tiles” shapefile here.

setwd("/guru-blard/DAYMET")
 
param = c("vp", "tmin", "tmax", "swe", "srad", "prcp", "dayl") #see here https://daymet.ornl.gov/overview.html for the available variable
tiles=c(11922:11925, 11742:11745) #id of the tiles of interest (cfr shapefile)
year_range=1985:2015
 
for (i in year_range) {
  for (j in tiles) {
    for (k in param) {
      download_string = sprintf("https://daymet.ornl.gov/thredds/fileServer/ornldaac/1219/tiles/%s/%s_%s/%s.nc", 
                                i, j, i, k)
      daymet_file = paste(k, "_", i, "_", j, ".nc", 
                          sep = "")
      cat(paste("Downloading DAYMET data for tile: ", 
                j, "; year: ", i, "; product: ", k, "\n", 
                sep = ""))
      try(download(download_string, daymet_file, quiet = TRUE, 
                   mode = "wb"), silent = FALSE)
    }
  }
}
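 
Each downloaded .nc file can then be opened as a multi-layer raster, with one layer per day. A minimal sketch, assuming the raster and ncdf4 packages are installed (the file name is one of those produced by the loop above):
 
library(raster) # reading NetCDF requires the ncdf4 package
tmin_1985 <- brick("tmin_1985_11922.nc") # one layer per day of 1985 for tile 11922
plot(tmin_1985[[1]]) # minimum temperature on the first day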
31 Aug

Merging two data frames while keeping the original row order

Hello GIStouille,

Here is a nice function to merge two data frames while keeping the original row order (source).
keep_order=1 keeps the order of the first data frame (x).
keep_order=2 keeps the order of the second data frame (y).

merge.with.order <- function(x,y, ..., sort = T, keep_order)
{
  # this function works just like merge, only that it adds the option to return the merged data.frame ordered by x (1) or by y (2)
  add.id.column.to.data <- function(DATA)
  {
    data.frame(DATA, id... = seq_len(nrow(DATA)))
  }
  # add.id.column.to.data(data.frame(x = rnorm(5), x2 = rnorm(5)))
  order.by.id...and.remove.it <- function(DATA)
  {
    # gets in a data.frame with the "id..." column.  Orders by it and returns it
    if(!any(colnames(DATA)=="id...")) stop("The function order.by.id...and.remove.it only works with data.frame objects which includes the 'id...' order column")
 
    ss_r <- order(DATA$id...)
    ss_c <- colnames(DATA) != "id..."
    DATA[ss_r, ss_c]
  }
 
  if(!missing(keep_order))
  {
    if(keep_order == 1) return(order.by.id...and.remove.it(merge(x=add.id.column.to.data(x),y=y,..., sort = FALSE)))
    if(keep_order == 2) return(order.by.id...and.remove.it(merge(x=x,y=add.id.column.to.data(y),..., sort = FALSE)))
    # if you didn't get "return" by now - issue a warning.
    warning("The function merge.with.order only accepts NULL/1/2 values for the keep_order variable")
  } else {return(merge(x=x,y=y,..., sort = sort))}
}
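 
A quick illustration with made-up data frames: plain merge() returns the rows sorted by the key, while keep_order=1 preserves the original order of x.
 
x <- data.frame(id = c(3, 1, 2), val_x = c("c", "a", "b"))
y <- data.frame(id = c(1, 2, 3), val_y = c("one", "two", "three"))
merge(x, y, by = "id")                             # rows come back ordered 1, 2, 3
merge.with.order(x, y, by = "id", keep_order = 1)  # rows stay in x's order: 3, 1, 2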
25 Aug

How to convert HDF files to TIFF?

Hi crazyGISfans,

The HDF format can sometimes be difficult to manage, and we prefer working with GeoTIFF.
So how do you convert HDF to TIFF in R with gdal_translate?

library(gdalUtils)
library(tools)
 
setwd("/guru-blard/")
 
files <- list.files(pattern=".hdf$")
band <- 1 # index of the subdataset to extract
 
for (file in files){
  gdal_translate(src_dataset=get_subdatasets(file)[band], dst_dataset=paste0(file_path_sans_ext(file),"_B", band,'.tif'))
}
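 
If you are not sure which index corresponds to the band you want, print the subdataset names first and pick the right one (the file name below is just a hypothetical example):
 
get_subdatasets("MOD13Q1_example.hdf") # lists the subdataset names; use the position of the one you want as 'band'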

That’s it.

08 Jun

Assemble png into a grid using ImageMagick and R

Holà GISpa,

Here is an R snippet that uses the montage tool of ImageMagick to assemble PNGs into one grid. All the parameters are explained here.

pattern <- 'CoolestPngEver_'
In <- list.files("/export/homes/gurublard/", full.names=TRUE, pattern=pattern)
Out <- paste0("/export/homes/gurublard/", pattern, "all.png")
command <- paste0("montage -label '%f' -density 300 -tile 4x0 -geometry +5+50 -border 10 ", paste(In, collapse=" "), " ", Out)
system(command)


02 Apr

Extract raster values in parallel

Dear fellow Remote Senseis,

Here is a code chunk that will help you speed up the processing time when you want to extract raster values at specific locations.
It uses a package called snowfall, which allows parallel processing.
As usual, please find below a dummy example. As there are only 3 bands in the stack, you may not see the benefit of parallel computing… try with a 100-layer stack and you'll see.

Cheers

install.packages("snowfall")
 
library(snowfall)
library(raster)
 
ncpus <- 3
 
r <- (stack(system.file("external/rlogo.grd", package="raster")))
inSlist <- unstack(r) # convert to list of single raster layers
 
# Use the first band to select the cells of interest (here, cells with value > 200)
rId <- subset(r, 1)
idPix <- which(rId[] > 200)
 
# Extract the values of those cells from each layer in parallel
sfInit(parallel=TRUE, cpus=ncpus)
sfLibrary(raster)
sfLibrary(rgdal)
a <- sfSapply(inSlist, extract, y=idPix)
sfStop()
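 
The result a is a matrix with one row per selected cell and one column per layer. It should match a plain sequential extraction, which makes for an easy sanity check (reusing the objects created above):
 
b <- extract(r, idPix) # sequential version, for comparison
all.equal(unname(a), unname(b)) # should be TRUE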
04 Mar

List filenames in a directory that are different from a specific pattern

Dear followers,

Have you ever needed to list the filenames in a directory that do not match a specific pattern?

Just let the MaGIS happen with the list.diff function !

list.diff<-function(dir, pattern){
  filename.all<-list.files(dir)
  filename.diff<-list.files(dir, pattern)
  filename<-setdiff(filename.all,filename.diff)
  return(filename)
}
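 
For example, to list everything in a folder except the GeoTIFFs (the directory is just an illustration):
 
list.diff("/guru-blard/", ".tif$") # all filenames that do not match the .tif pattern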
19 Feb

Larger and smaller extent shared by different rasters

Hello GISton,

Here is a way to get the largest extent covering several rasters (red, their union) and the smallest extent shared by all of them (green, their intersection).


[Figure: the union (red) and intersection (green) of three raster extents]

library(raster)
 
# dummy rasters for illustration; with your own data, use lapply(raster_list, extent) to get their extents
a <- raster(nrows=884, ncols=804, xmn=-45.85728, xmx=-43.76855, ymn=-2.388705, ymx=-0.5181549)
b <- raster(nrows=884, ncols=804, xmn=-45.87077, xmx=-43.78204, ymn=-2.388727, ymx=-0.5208711)
c <- raster(nrows=884, ncols=804, xmn=-45.81952, xmx=-43.7173,  ymn=-2.405129, ymx=-0.5154312)
 
a[] <- 1
b[] <- 2
c[] <- 3
 
plot(a, xlim=c(-45.8,-43.7), ylim=c(-2.41, -0.5))
par(new=TRUE)
plot(b, xlim=c(-45.8,-43.7), ylim=c(-2.41, -0.5))
 
# dummy extents (replacing the rasters above); with real data: extent_list <- lapply(raster_list, extent)
a <- extent(-45, -30, -20, -10)
b <- extent(-55, -35, -25, -5)
c <- extent(-40, -25, -15, 0)
extent_list <- list(a, b, c)
 
# make a matrix out of it, each column represents a raster, rows the values
extent_list<-lapply(extent_list, as.matrix)
matrix_extent<-matrix(unlist(extent_list), ncol=length(extent_list))
rownames(matrix_extent)<-c("xmin", "ymin", "xmax", "ymax")
 
# create the larger extent that covers all the individual extents (union)
larger_extent <- extent(min(matrix_extent[1,]), max(matrix_extent[3,]),
                        min(matrix_extent[2,]), max(matrix_extent[4,]))
 
# create the smaller extent shared by all the individual extents (intersection)
smaller_extent <- extent(max(matrix_extent[1,]), min(matrix_extent[3,]),
                         max(matrix_extent[2,]), min(matrix_extent[4,]))
 
# Plot results
 
a.shp<-as(a, 'SpatialPolygons')
b.shp<-as(b, 'SpatialPolygons')
c.shp<-as(c, 'SpatialPolygons')
larger_extent.shp<-as(larger_extent, 'SpatialPolygons')
smaller_extent.shp<-as(smaller_extent, 'SpatialPolygons')
 
plot(a.shp,  xlim=c(larger_extent@xmin, larger_extent@xmax), ylim=c(larger_extent@ymin, larger_extent@ymax))
plot(b.shp,  xlim=c(larger_extent@xmin, larger_extent@xmax), ylim=c(larger_extent@ymin, larger_extent@ymax), add=TRUE)
plot(c.shp,  xlim=c(larger_extent@xmin, larger_extent@xmax), ylim=c(larger_extent@ymin, larger_extent@ymax), add=TRUE)
plot(larger_extent.shp,  xlim=c(larger_extent@xmin, larger_extent@xmax), ylim=c(larger_extent@ymin, larger_extent@ymax), 
     add=TRUE, lwd=3, border="red")
plot(smaller_extent.shp,  xlim=c(larger_extent@xmin, larger_extent@xmax), ylim=c(larger_extent@ymin, larger_extent@ymax), 
     add=TRUE, lwd=3, border="green")
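 
As an alternative, recent versions of the raster package provide union() and intersect() methods for Extent objects (treat this as an assumption and check your version); if so, the same two extents can be obtained directly with Reduce():
 
larger_extent2 <- Reduce(raster::union, list(a, b, c))      # union of the three extents
smaller_extent2 <- Reduce(raster::intersect, list(a, b, c)) # intersection of the three extents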
19 Jan

Generate a random landscape

Hey GuruFans…

Have you ever wanted to test all your cool algorithms on a simulated landscape instead of a real one? No? Well, too bad… I will still show you how to quickly create a nice landscape from scratch using the R secr and raster packages…

# script to generate a random landscape

require(raster)
require(secr)
require(igraph)
tempmask <- make.mask(nx = 200, ny = 200, spacing = 10)

# Create 3 masks with different properties: the degree of fragmentation/aggregation of the patches (p), the proportion of the area occupied by the patches (A), and the minimum patch size (minpatch)
DBFmask <- raster(randomHabitat(tempmask, p = 0.5, A = 0.2, minpatch = 20))
WCRmask <- raster(randomHabitat(tempmask, p = 0.4, A = 0.4, minpatch = 8))
SCRmask <- raster(randomHabitat(tempmask, p = 0.3, A = 0.5, minpatch = 5))

# Start from a background class (4) and burn in the three habitat masks (classes 1-3)
r <- raster(tempmask)
r[] <- 4
r[which(as.vector(SCRmask)==1)] <- 3
r[which(as.vector(WCRmask)==1)] <- 2
r[which(as.vector(DBFmask)==1)] <- 1

plot(r)
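 
A quick way to check what proportion of the landscape each class ends up with (randomHabitat() is random, so the values change at every run unless you call set.seed() first):
 
round(prop.table(table(r[])), 2) # share of cells in each of the 4 classes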

And it should look something like this:

[Plot of the simulated landscape]