创建一个函数来避免循环

Creating functions to avoid loops

任何人都可以在这里突出显示可以转化为函数的区域吗?如果是这样,怎么办? 是否有编写函数或编写更简洁脚本的一般规则? 任何人都可以在这个脚本中看到任何红旗坏习惯吗?

示例上下文:

这个脚本的目的是遍历轨迹坐标。对于每个位置,我想在指定的误差范围内生成 9 个随机样本。对于每个误差位置和原始点(总共 10 个),我希望从数据源中提取数据——在本例中是与形状文件(shapefile)的距离。然后我想取提取数据的平均值,并将其添加回原始轨迹文件。

示例数据:

Date_Time           longitude       latitude 
27/10/2011 15:15    -91.98876953    1.671900034 
30/10/2011 14:31    -91.91790771    1.955003262 
30/10/2011 15:34    -91.91873169    1.961261749 
30/10/2011 20:55    -91.86060333    1.996331811 
31/10/2011 04:03    -91.67115021    1.929548025 
03/11/2011 18:36    -90.67552948    1.850875616 
04/11/2011 18:26    -90.65361023    1.799352288 
07/11/2011 19:29    -92.13287354    0.755102754 
07/11/2011 20:28    -92.13739014    0.783674061 
27/12/2011 13:43    -88.16407776    -4.953748703
07/01/2012 18:44    -82.51725006    -5.717019081
07/01/2012 19:30    -82.50763702    -5.706347942
07/01/2012 20:28    -82.50556183    -5.696153641 
07/01/2012 21:10    -82.50305176    -5.685819626
08/01/2012 00:27    -82.18003845    -5.623015404 
08/01/2012 18:37    -82.17269897    -5.61870575 
08/01/2012 19:20    -82.16355133    -5.612465382 

此数据表示一个文件,列表中会有很多文件。

实现任务的示例脚本:

#### Packages ####

library(dplyr)
library(geosphere)
library(rgdal)
library(rgeos)
library(truncnorm) 

# Load files

dir <- 'C:/Users/Documents/PhD/Chapters/'
sfolder <- paste0(dir, 'Data/Tracks/')
# BUG FIX: the original pattern '.csv' is a regex where '.' matches ANY
# character (e.g. 'mycsv.txt' would match); escape the dot and anchor at
# the end so only *.csv files are listed.
sfiles <- list.files(sfolder, pattern = '\\.csv$', recursive = TRUE)

## Load the contours for proximity measurements
# 200 m depth contour
contour2 <- readOGR(paste0(dir, 'QGIS/Base layers/2GEBCO_2020_Contour_200.gpkg'))

# 1000 m depth contour
contour1 <- readOGR(paste0(dir, 'QGIS/Base layers/2GEBCO_2020_Contour_1000.gpkg'))

# Land (0 m contour / coastline)
land <- readOGR(paste0(dir, 'QGIS/Base layers/GEBCO_2020_Contour_0.gpkg'))

# Contour identifiers to extract; their order defines the proximity columns
extracts <- c('200', '1000', '0')

# Extract proximity data for all tracks

for (o in seq_along(sfiles)) {

  # Tag type ('ARGOS' or 'PSAT') is encoded three directory levels up
  # in the relative file path.
  tagType <- dirname(dirname(dirname(sfiles[o])))
  # BUG FIX: the original read from undefined `sfold`; the variable is `sfolder`.
  track <- read.csv(paste0(sfolder, sfiles[o]))
  ntrack <- nrow(track)

  # One row per track location, one column per contour in `extracts`
  proximity <- data.frame(matrix(ncol = 3, nrow = ntrack))

  # Generate random samples around each point within the tag's error field
  for (i in seq_len(ntrack)) {

    errors <- data.frame(matrix(ncol = 2, nrow = 10))

    # The truncated normal is centred on the observed point.
    # BUG FIX: the original `mean(x - d, x + d)` did NOT average its two
    # arguments — mean()'s second positional argument is `trim` — so the
    # distribution was centred on the box edge, not the point.
    meanx <- track$longitude[i]
    meany <- track$latitude[i]

    if (tagType == 'PSAT') {
      # BUG FIX: the original called rnorm() with truncnorm-style a=/b=
      # arguments; rtruncnorm() is the intended sampler.
      errors[, 1] <- rtruncnorm(n = 10, a = meanx - 0.53, b = meanx + 0.53, mean = meanx)
      errors[, 2] <- rtruncnorm(n = 10, a = meany - 1.08, b = meany + 1.08, mean = meany)
    }

    if (tagType == 'ARGOS') {
      errors[, 1] <- rtruncnorm(n = 10, a = meanx - 0.12, b = meanx + 0.12, mean = meanx)
      errors[, 2] <- rtruncnorm(n = 10, a = meany - 0.12, b = meany + 0.12, mean = meany)
    }

    # Row 1 holds the original location; rows 2-10 are the error samples
    errors[1, ] <- c(track$longitude[i], track$latitude[i])
    colnames(errors) <- c('longitude', 'latitude')
    errTrack <- SpatialPoints(errors[, c(1, 2)])

    # Now to get coordinates from contour files
    for (a in seq_along(extracts)) {

      extract <- extracts[a]

      if (extract == '200')  { contour <- contour2 }
      if (extract == '1000') { contour <- contour1 }
      if (extract == '0')    { contour <- land }

      n <- length(errTrack)  # 10 = 9 random samples + original location
      distances <- data.frame(matrix(ncol = 2, nrow = n))

      # Nearest point on the contour for every error location
      for (e in seq_along(errTrack)) {
        distances[e, ] <- coordinates(gNearestPoints(errTrack[e], contour))[2, ]
      }
      colnames(distances) <- c('longitude', 'latitude')

      # BUG FIX: SpatialPoints has no $longitude accessor (no data slot);
      # coordinates() returns the (longitude, latitude) matrix directly.
      p1 <- coordinates(errTrack)
      p2 <- cbind(distances$longitude, distances$latitude)

      # Great-circle distance in km, rounded to 2 dp (as before) then averaged
      finalDist <- round(distHaversine(p1, p2, r = 6378137) / 1000, 2)

      proximity[i, a] <- mean(finalDist)

    } # end for all contour extracts
  } # end for each row in track

  track$Proximity_land  <- proximity$X3
  track$Proximity_200m  <- proximity$X1
  track$Proximity_1000m <- proximity$X2

} # end for all tracks

我知道这可能有点小众,但如果有人能够就使用函数整理循环代码的一般方法提供任何见解,或者指引我找到可能有用的资源,那就太好了,不胜感激。同样,如果有人可以专门帮助加速/整理这段代码,那就太棒了!(轮廓文件可以用随机多边形代替,以便在需要时复现。)我希望这个问题适合这个论坛,如果不适合,请见谅。

我同意您的评价,即代码可以通过使用一些功能来澄清。通过使用函数,您可以将大型、复杂的程序分解成可管理的块,这些块可以单独推理。

关于程序中的循环,很多人认为映射(map)函数比循环更清晰。它们本质上和你在循环中所做的一样遍历元素集合,但不必跟踪索引变量。purrr 包提供了一套优秀的映射函数和其他工具。

阅读这些主题和更多内容的一些很好的资源包括 https://rstudio-education.github.io/hopr/, https://r4ds.had.co.nz/, and https://adv-r.hadley.nz/

在下面的代码中,我试图将一些代码提取到函数中,希望使控制流更容易理解。由于我没有在实际数据上尝试过代码,如果不进行一些修复,它肯定无法工作,但希望它能给你一些想法。

# PSAT tags: error box of ±0.53° longitude and ±1.08° latitude.
calc_errors_psat <- function(long, lat) {
  calc_errTrack(long, lat, long_offset = 0.53, lat_offset = 1.08)
}

# ARGOS tags: error box of ±0.12° in both longitude and latitude.
calc_errors_argos <- function(long, lat) {
  calc_errTrack(long, lat, long_offset = 0.12, lat_offset = 0.12)
}

#' Build a SpatialPoints object with the observed location plus random
#' error samples drawn from a truncated normal inside the error box.
#'
#' @param long,lat        Observed coordinates (scalars, decimal degrees).
#' @param long_offset,lat_offset  Half-widths of the error box.
#' @param n_samples       Number of random samples (default 9, so the
#'                        result has 10 points: original + samples).
calc_errTrack <- function(long, lat, long_offset, lat_offset, n_samples = 9) {
  # BUG FIX: the original computed mean(long - off, long + off), but
  # mean()'s second positional argument is `trim`, so that expression
  # just returned `long - off`.  The distribution centre is the point itself.
  err_long <- rtruncnorm(n = n_samples, a = long - long_offset,
                         b = long + long_offset, mean = long)
  err_lat  <- rtruncnorm(n = n_samples, a = lat - lat_offset,
                         b = lat + lat_offset, mean = lat)
  err <- data.frame(
    longitude = c(long, err_long),
    latitude  = c(lat,  err_lat)
  )
  SpatialPoints(err)
}

#' Mean great-circle distance (km) from a set of error points to the
#' nearest point on a contour.
#'
#' @param errTrack SpatialPoints: original location + error samples.
#' @param contour  Spatial layer to measure proximity against.
#' @return Scalar: mean of the per-point distances, each rounded to 2 dp
#'         before averaging (matching the original script's behaviour).
calc_distValues <- function(errTrack, contour) {

  n <- length(errTrack)  # 10 for 9 random samples + original location
  nearest <- matrix(NA_real_, nrow = n, ncol = 2)

  # Nearest point on the contour for each error location
  for (e in seq_len(n)) {
    nearest[e, ] <- coordinates(gNearestPoints(errTrack[e], contour))[2, ]
  }

  # BUG FIX: SpatialPoints has no $longitude/$latitude accessors (it has
  # no data slot); coordinates() yields the (longitude, latitude) matrix.
  p1 <- coordinates(errTrack)

  # Convert to great-circle distance in km, round each, then average.
  distances_km <- round(distHaversine(p1, nearest, r = 6378137) / 1000, 2)
  mean(distances_km)
}

# Pick the error-sampling function for a track file: the tag type
# ('PSAT' or 'ARGOS') is the directory three levels above the file.
find_err_fcn <- function(loc) {
  tag_type <- dirname(dirname(dirname(loc)))
  if (identical(tag_type, "PSAT")) calc_errors_psat else calc_errors_argos
}

# get the track file locations
dir <- 'C:/Users/Documents/PhD/Chapters/'
sfolder <- file.path(dir, 'Data/Tracks')
# Keep the paths RELATIVE to sfolder so find_err_fcn() can still see the
# tag-type directory ('ARGOS'/'PSAT'); only match real .csv files.
track_rel  <- list.files(sfolder, pattern = '\\.csv$', recursive = TRUE)
track_locs <- file.path(sfolder, track_rel)

# read in files and error functions into a data frame, and calculate the track
# errors
track_df <- tibble::tibble(
  track_list    = purrr::map(track_locs, read.csv),
  calc_err_fcns = purrr::map(track_rel, find_err_fcn),
  # BUG FIX: the error functions take (long, lat), not a data frame.
  # NOTE(review): calc_errTrack samples around a single point — applying it
  # to whole columns assumes vectorised inputs; map row-wise if not. TODO confirm.
  errTrack_list = purrr::map2(
    track_list, calc_err_fcns,
    function(trk, f) f(trk$longitude, trk$latitude)
  )
)

# calculate the track proximity distances
# BUG FIX: c() flattens/mangles S4 Spatial objects; a named list() keeps
# each layer intact.
proximity_contours <- list(
  contour2 = readOGR(file.path(dir, 'QGIS/Base layers/2GEBCO_2020_Contour_200.gpkg')),
  contour1 = readOGR(file.path(dir, 'QGIS/Base layers/2GEBCO_2020_Contour_1000.gpkg')),
  land     = readOGR(file.path(dir, 'QGIS/Base layers/GEBCO_2020_Contour_0.gpkg'))
)

# One numeric column per contour.
# BUG FIXES: the function is calc_distValues (original called the undefined
# calc_distValue), and map_dbl() is needed so each column is a numeric
# vector rather than a list.
track_results <- purrr::map_dfc(
  .x = proximity_contours,
  .f = function(contour) purrr::map_dbl(
    .x      = track_df$errTrack_list,
    .f      = calc_distValues,
    contour = contour
  )
)