From 6de69264ec43363853b44b409392ec7ecbc565c6 Mon Sep 17 00:00:00 2001 From: olivroy Date: Wed, 17 Jan 2024 14:59:26 -0500 Subject: [PATCH 1/5] Progress --- R/shift_longitude.R | 8 ++++---- R/tidyverse.R | 4 +++- man/st_shift_longitude.Rd | 6 +++--- man/tidyverse.Rd | 27 +++++++++++++++++++++------ sf.Rproj | 1 + 5 files changed, 32 insertions(+), 14 deletions(-) diff --git a/R/shift_longitude.R b/R/shift_longitude.R index a350da760..440b370f7 100644 --- a/R/shift_longitude.R +++ b/R/shift_longitude.R @@ -6,10 +6,10 @@ #' values straddling 180 degrees. In general, using a projected #' coordinate reference system is to be preferred, but this method permits a #' geographical coordinate reference system to be used. This is the sf -#' equivalent of \code{\link[sp:recenter-methods]{recenter}} in the sp package and -#' ST_ShiftLongitude in PostGIS. +#' equivalent of [recenter][sp::recenter] in the sp package and +#' `ST_ShiftLongitude` in PostGIS. #' -#' @param x object of class sf or sfc +#' @param x object of class `sf` or `sfc`. #' @param ... ignored #' #' @export @@ -46,5 +46,5 @@ st_shift_longitude.sfc = function(x, ...) { #' st_shift_longitude(d) st_shift_longitude.sf = function(x, ...) { st_geometry(x) = st_shift_longitude(st_geometry(x)) - return(x) + x } diff --git a/R/tidyverse.R b/R/tidyverse.R index 078de32a9..c3041841d 100644 --- a/R/tidyverse.R +++ b/R/tidyverse.R @@ -5,7 +5,7 @@ # This is currently only used in `bind_rows()` and `bind_cols()` # because sf overrides all default implementations -#' @name tidyverse + dplyr_reconstruct.sf = function(data, template) { sfc_name = attr(template, "sf_column") if (inherits(template, "tbl_df")) @@ -23,6 +23,7 @@ dplyr_reconstruct.sf = function(data, template) { ) } +#' @name tidyverse group_split.sf <- function(.tbl, ..., .keep = TRUE) { class(.tbl) = setdiff(class(.tbl), "sf") if (inherits(.tbl, "rowwise_df")) { @@ -232,6 +233,7 @@ rename.sf <- function(.data, ...) { st_set_agr(st_as_sf(ret, sf_column_name = sf_column), agr) } +#' @name tidyverse rename_with.sf = function(.data, .fn, .cols, ...) { if (!requireNamespace("rlang", quietly = TRUE)) stop("rlang required: install that first") # nocov diff --git a/man/st_shift_longitude.Rd b/man/st_shift_longitude.Rd index 5a2014de1..69d0ad88c 100644 --- a/man/st_shift_longitude.Rd +++ b/man/st_shift_longitude.Rd @@ -13,7 +13,7 @@ st_shift_longitude(x) \method{st_shift_longitude}{sf}(x, ...) } \arguments{ -\item{x}{object of class sf or sfc} +\item{x}{object of class \code{sf} or \code{sfc}.} \item{...}{ignored} } @@ -23,8 +23,8 @@ being represented on the far left and right of a plot because they have values straddling 180 degrees. In general, using a projected coordinate reference system is to be preferred, but this method permits a geographical coordinate reference system to be used. This is the sf -equivalent of \code{\link[sp:recenter-methods]{recenter}} in the sp package and -ST_ShiftLongitude in PostGIS. +equivalent of \link[sp:recenter-methods]{recenter} in the sp package and +\code{ST_ShiftLongitude} in PostGIS. 
} \examples{ ## sfc diff --git a/man/tidyverse.Rd b/man/tidyverse.Rd index a36427974..03d5106a7 100644 --- a/man/tidyverse.Rd +++ b/man/tidyverse.Rd @@ -2,7 +2,7 @@ % Please edit documentation in R/tidyverse.R, R/join.R \name{tidyverse} \alias{tidyverse} -\alias{dplyr_reconstruct.sf} +\alias{group_split.sf} \alias{filter.sf} \alias{arrange.sf} \alias{group_by.sf} @@ -12,6 +12,7 @@ \alias{transmute.sf} \alias{select.sf} \alias{rename.sf} +\alias{rename_with.sf} \alias{slice.sf} \alias{summarise.sf} \alias{summarise} @@ -36,7 +37,7 @@ \alias{anti_join.sf} \title{Tidyverse methods for sf objects (remove .sf suffix!)} \usage{ -dplyr_reconstruct.sf(data, template) +group_split.sf(.tbl, ..., .keep = TRUE) filter.sf(.data, ..., .dots) @@ -56,6 +57,8 @@ select.sf(.data, ...) rename.sf(.data, ...) +rename_with.sf(.data, .fn, .cols, ...) + slice.sf(.data, ..., .dots) summarise.sf(.data, ..., .dots, do_union = TRUE, is_coverage = FALSE) @@ -163,14 +166,22 @@ semi_join.sf(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"), ...) anti_join.sf(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"), ...) } \arguments{ -\item{data}{see original function docs} +\item{...}{other arguments} -\item{template}{see original function docs} +\item{.keep}{Should the join keys from both \code{x} and \code{y} be preserved in the +output? +\itemize{ +\item If \code{NULL}, the default, joins on equality retain only the keys from \code{x}, +while joins on inequality retain the keys from both inputs. +\item If \code{TRUE}, all keys from both inputs are retained. +\item If \code{FALSE}, only keys from \code{x} are retained. For right and full joins, +the data in key columns corresponding to rows that only exist in \code{y} are +merged into the key columns from \code{x}. Can't be used when joining on +inequality conditions. +}} \item{.data}{data object of class \link{sf}} -\item{...}{other arguments} - \item{.dots}{see corresponding function in package \code{dplyr}} \item{add}{see corresponding function in dplyr} @@ -185,6 +196,8 @@ more details.} \item{.keep_all}{see corresponding function in dplyr} +\item{data}{see original function docs} + \item{key}{see original function docs} \item{value}{see original function docs} @@ -304,6 +317,8 @@ it is a potentially expensive operation so you must opt into it.} \item{suffix}{If there are non-joined duplicate variables in \code{x} and \code{y}, these suffixes will be added to the output to disambiguate them. 
Should be a character vector of length 2.} + +\item{template}{see original function docs} } \value{ an object of class \link{sf} diff --git a/sf.Rproj b/sf.Rproj index e8ed6e447..89df4dd41 100644 --- a/sf.Rproj +++ b/sf.Rproj @@ -19,3 +19,4 @@ BuildType: Package PackageUseDevtools: Yes PackageInstallArgs: --no-multiarch --with-keep.source PackageRoxygenize: rd,collate,namespace + From 9abef46491a7fb3ca156ee19183f7d70ff06bed8 Mon Sep 17 00:00:00 2001 From: olivroy Date: Wed, 17 Jan 2024 18:46:35 -0500 Subject: [PATCH 2/5] minor edits to vignettes --- vignettes/sf1.Rmd | 46 +++++++++++++++++++++++----------------------- vignettes/sf2.Rmd | 30 +++++++++++++++--------------- vignettes/sf3.Rmd | 40 ++++++++++++++++++++-------------------- vignettes/sf4.Rmd | 8 ++++---- vignettes/sf5.Rmd | 14 +++++++------- vignettes/sf6.Rmd | 6 +++--- vignettes/sf7.Rmd | 21 ++++++++++----------- 7 files changed, 82 insertions(+), 83 deletions(-) diff --git a/vignettes/sf1.Rmd b/vignettes/sf1.Rmd index 4fe9ca977..115fa18eb 100644 --- a/vignettes/sf1.Rmd +++ b/vignettes/sf1.Rmd @@ -173,7 +173,7 @@ library(sf) nc <- st_read(system.file("shape/nc.shp", package="sf")) ``` -(Note that users will not use `system.file` but give a `filename` directly, and that shapefiles consist of more than one file, all with identical basename, which reside in the same directory.) +(Note that users will not use `system.file()` but give a `filename` directly, and that shapefiles consist of more than one file, all with identical basename, which reside in the same directory.) The short report printed gives the file name, the driver (ESRI Shapefile), mentions that there are 100 features (records, represented as rows) and 14 fields (attributes, represented as columns). This object is of class @@ -226,7 +226,7 @@ However, such objects: The column in the `sf` data.frame that contains the geometries is a list, of class `sfc`. We can retrieve the geometry list-column in this case by `nc$geom` or `nc[[15]]`, but the -more general way uses `st_geometry`: +more general way uses `st_geometry()`: ```{r} (nc_geom <- st_geometry(nc)) @@ -275,9 +275,9 @@ Methods for geometry list-columns include: methods(class = 'sfc') ``` -Coordinate reference systems (`st_crs` and `st_transform`) are discussed in the section on [coordinate reference systems](#crs). -`st_as_wkb` and `st_as_text` convert geometry list-columns into well-known-binary or well-known-text, explained [below](#wkb). -`st_bbox` retrieves the coordinate bounding box. +Coordinate reference systems (`st_crs()` and `st_transform()`) are discussed in the section on [coordinate reference systems](#crs). +`st_as_wkb()` and `st_as_text()` convert geometry list-columns into well-known-binary or well-known-text, explained [below](#wkb). +`st_bbox()` retrieves the coordinate bounding box. Attributes include: ```{r} @@ -468,7 +468,7 @@ by using the otherwise nearly identical but more quiet nc <- read_sf(filename) ``` -Writing takes place in the same fashion, using `st_write`: +Writing takes place in the same fashion, using `st_write()`: ```{r} st_write(nc, "nc.shp") @@ -489,7 +489,7 @@ write_sf(nc, "nc.shp") # silently overwrites ### Driver-specific options -The `dsn` and `layer` arguments to `st_read` and `st_write` +The `dsn` and `layer` arguments to `st_read()` and `st_write()` denote a data source name and optionally a layer name. 
Their exact interpretation as well as the options they support vary per driver, the [GDAL driver documentation](https://gdal.org/drivers/vector/index.html) @@ -502,11 +502,11 @@ meuse <- st_read("PG:dbname=postgis", "meuse") where the `PG:` string indicates this concerns the PostGIS driver, followed by database name, and possibly port and user credentials. -When the `layer` and `driver` arguments are not specified, `st_read` +When the `layer` and `driver` arguments are not specified, `st_read()` tries to guess them from the datasource, or else simply reads the first layer, giving a warning in case there are more. -`st_read` typically reads the coordinate reference system as +`st_read()` typically reads the coordinate reference system as `proj4string`, but not the EPSG (SRID). GDAL cannot retrieve SRID (EPSG code) from `proj4string` strings, and, when needed, it has to be set by the user. See also the section on [coordinate reference systems](#crs). @@ -553,7 +553,7 @@ bikraces <- st_read("bikeraces.kml") GDAL provides the [crud](https://en.wikipedia.org/wiki/Create,_read,_update_and_delete) (create, read, update, delete) functions to persistent storage. -`st_read` (or `read_sf`) are used for reading. `st_write` (or `write_sf`) +`st_read()` (or `read_sf()`) are used for reading. `st_write()` (or `write_sf()`) creates, and has the following arguments to control update and delete: * `update=TRUE` causes an existing data source to be updated, if it @@ -605,7 +605,7 @@ from such improvements, and limit some of the provenance of datasets, but may help reproducibility. Coordinate reference system transformations can be carried out using -`st_transform`, e.g. converting longitudes/latitudes in NAD27 to +`st_transform()`, e.g. converting longitudes/latitudes in NAD27 to web mercator (EPSG:3857) can be done by: ```{r} @@ -645,15 +645,15 @@ will override this, and create `MULTIPOLYGON`s in all cases. For `LINES` the sit The standard for simple feature access defines a number of geometrical operations. -`st_is_valid` and `st_is_simple` return a boolean indicating whether +`st_is_valid()` and `st_is_simple()` return a Boolean indicating whether a geometry is valid or simple. ```{r} st_is_valid(nc[1:2,]) ``` -`st_distance` returns a dense numeric matrix with distances -between geometries. `st_relate` returns a character matrix with the +`st_distance()` returns a dense numeric matrix with distances +between geometries. 
`st_relate()` returns a character matrix with the [DE9-IM](https://en.wikipedia.org/wiki/DE-9IM#Illustration) values for each pair of geometries: ```{r} x = st_transform(nc, 32119) st_distance(x[c(1,4,22),], x[c(1, 33,55,56),]) st_relate(nc[1:5,], nc[1:4,]) ``` -The commands `st_intersects`, `st_disjoint`, `st_touches`, -`st_crosses`, `st_within`, `st_contains`, `st_overlaps`, -`st_equals`, `st_covers`, `st_covered_by`, `st_equals_exact` and -`st_is_within_distance` return a sparse matrix with matching (TRUE) +`st_intersects()`, `st_disjoint()`, `st_touches()`, +`st_crosses()`, `st_within()`, `st_contains()`, `st_overlaps()`, +`st_equals()`, `st_covers()`, `st_covered_by()`, `st_equals_exact()` and +`st_is_within_distance()` return a sparse matrix (`sgbp` object) with matching (`TRUE`) indexes, or a full logical matrix: ```{r} st_intersects(nc[1:5,], nc[1:4,]) st_intersects(nc[1:5,], nc[1:4,], sparse = FALSE) ``` -The commands `st_buffer`, `st_boundary`, `st_convexhull`, -`st_union_cascaded`, `st_simplify`, `st_triangulate`, -`st_polygonize`, `st_centroid`, `st_segmentize`, and `st_union` +`st_buffer()`, `st_boundary()`, `st_convexhull()`, +`st_union_cascaded()`, `st_simplify()`, `st_triangulate()`, +`st_polygonize()`, `st_centroid()`, `st_segmentize()`, and `st_union()` return new geometries, e.g.: ```{r fig.height=3} sel <- c(1,5,14) geom = st_geometry(nc.no_z[sel,]) buf <- st_buffer(geom, dist = 30000) plot(buf, border = 'red') plot(geom, add = TRUE) plot(st_buffer(geom, -5000), add = TRUE, border = 'blue') ``` -Commands `st_intersection`, `st_union`, `st_difference`, -`st_sym_difference` return new geometries that are a function of +`st_intersection()`, `st_union()`, `st_difference()`, and +`st_sym_difference()` return new geometries that are a function of pairs of geometries: ```{r fig.height=3} diff --git a/vignettes/sf2.Rmd b/vignettes/sf2.Rmd index 7a47eca24..0ddfd425b 100644 --- a/vignettes/sf2.Rmd +++ b/vignettes/sf2.Rmd @@ -27,10 +27,10 @@ formats (text, [sp](https://cran.r-project.org/package=sp)) ## Reading and writing through GDAL The Geospatial Data Abstraction Library -([GDAL](https://gdal.org/)) is the swiss army knife for spatial +([GDAL](https://gdal.org/)) is the Swiss Army knife for spatial data: it reads and writes vector and raster data from and to practically every file format, or database, of significance. Package `sf` reads -and writes using GDAL by the functions `st_read` and `st_write`. +and writes using GDAL with `st_read()` and `st_write()`. The data model GDAL uses needs @@ -84,7 +84,7 @@ In eval(substitute(expr), envir, enclos) : automatically selected the first layer in a data source containing more than one. ``` -The message points to the `st_layers` command, which lists the driver +The message points to the `st_layers()` command, which lists the driver and layers in a datasource, e.g. ```{r eval=FALSE} @@ -105,19 +105,19 @@ A particular layer can now be read by e.g. st_read("PG:dbname=postgis", "sids") ``` -`st_layers` has the option to count the number of features in case +`st_layers()` has the option to count the number of features in case these are missing: some datasources (e.g. OSM xml files) do not report the number of features, but need to be completely read for this. GDAL allows for more than one geometry column for a feature -layer; these are reported by `st_layers`. +layer; these are reported by `st_layers()`. In case a layer contains only geometries but no attributes (fields), -`st_read` still returns an `sf` object, with a geometry column only. +`st_read()` still returns an `sf` object, with a geometry column only. 
We see that GDAL automatically detects the driver (file format) of the datasource, by trying them all in turn. -`st_read` follows the conventions of base R, similar to how it +`st_read()` follows the conventions of base R, similar to how it reads tabular data into `data.frame`s. This means that character data are read, by default as `factor`s. For those who insist on retrieving character data as character vectors, the argument @@ -145,7 +145,7 @@ st_write(nc, "nc1.shp") The file name is taken as the data source name. The default for the layer name is the basename (filename without path) of the the -data source name. For this, `st_write` needs to guess the driver. The +data source name. For this, `st_write()` needs to guess the driver. The above command is, for instance, equivalent to: ```{r} @@ -220,7 +220,7 @@ will give an error. Driver-specific options are documented in the driver manual of [gdal](https://gdal.org/drivers/vector/index.html). Multiple options can be given by multiple strings in `options`. -For `st_read`, there is only `options`; for `st_write`, one needs +For `st_read()`, there is only `options`; for `st_write()`, one needs to distinguish between `dataset_options` and `layer_options`, the first related to opening a dataset, the second to creating layers in the dataset. @@ -278,7 +278,7 @@ str = st_as_text(x) x ``` -We can convert back from WKT by using `st_as_sfc`: +We can convert back from WKT by using `st_as_sfc()`: ```{r} st_as_sfc(str) @@ -286,7 +286,7 @@ st_as_sfc(str) ### Conversion to and from well-known binary -Well-known binary is created from simple features by `st_as_binary`: +Well-known binary is created from simple features by `st_as_binary()`: ```{r} x = st_linestring(matrix(0:9,ncol=2,byrow=TRUE)) @@ -294,15 +294,15 @@ x = st_linestring(matrix(0:9,ncol=2,byrow=TRUE)) class(x) ``` -The object returned by `st_as_binary` is of class `WKB` and is +The object returned by `st_as_binary()` is of class `WKB` and is either a list with raw vectors, or a single raw vector. These -can be converted into a hexadecimal character vector using `rawToHex`: +can be converted into a hexadecimal character vector using `rawToHex()`: ```{r} rawToHex(x) ``` -Converting back to `sf` uses `st_as_sfc`: +Converting back to `sf` uses `st_as_sfc()`: ```{r} x = st_as_binary(st_sfc(st_point(0:1), st_point(5:6))) @@ -312,7 +312,7 @@ st_as_sfc(x) ### Conversion to and from sp Spatial objects as maintained by package `sp` can be converted into -simple feature objects or geometries by `st_as_sf` and `st_as_sfc`, +simple feature objects or geometries by `st_as_sf()` and `st_as_sfc()`, respectively: ```{r} diff --git a/vignettes/sf3.Rmd b/vignettes/sf3.Rmd index 1d655bd34..776b36e7a 100644 --- a/vignettes/sf3.Rmd +++ b/vignettes/sf3.Rmd @@ -28,11 +28,11 @@ manipulated, where manipulations include ## Type transformations -This sections discusses how simple feature geometries of one type can be converted to another. For converting lines to polygons, see also `st_polygonize` below. +This sections discusses how simple feature geometries of one type can be converted to another. For converting lines to polygons, see also `st_polygonize()` below. ### For single geometries -For single geometries, `st_cast` will +For single geometries, `st_cast()` will 1. convert from XX to MULTIXX, e.g. `LINESTRING` to `MULTILINESTRING` 2. 
convert from MULTIXX to XX if MULTIXX has length one (else, it will still convert but warn about loss of information) @@ -54,7 +54,7 @@ st_geometrycollection(list(st_point(c(1,1)))) %>% st_cast("POINT") ### For collections of geometry (sfc) and simple feature collections (sf) -It should be noted here that when reading geometries using `st_read`, the `type` argument can be used to control the class of the returned geometry: +It should be noted here that when reading geometries using `st_read()`, the `type` argument can be used to control the class of the returned geometry: ```{r} shp = system.file("shape/nc.shp", package="sf") class(st_geometry(st_read(shp, quiet = TRUE))) @@ -64,7 +64,7 @@ class(st_geometry(st_read(shp, quiet = TRUE, type = 1))) This option is handled by the GDAL library; in case of failure to convert to the target type, the original types are returned, which in this case is a mix of `POLYGON` and `MULTIPOLYGON` geometries, leading to a `GEOMETRY` as superclass. When we try to read multipolygons as polygons, all secondary rings of multipolygons get lost. -When functions return objects with mixed geometry type (`GEOMETRY`), downstream functions such as `st_write` may have difficulty handling them. For some of these cases, `st_cast` may help modify their type. For sets of geometry objects (`sfc`) and simple feature sets (`sf), `st_cast` can be used by specifying the target type, or without specifying it. +When functions return objects with mixed geometry type (`GEOMETRY`), downstream functions such as `st_write()` may have difficulty handling them. For some of these cases, `st_cast()` may help modify their type. For sets of geometry objects (`sfc`) and simple feature sets (`sf), `st_cast` can be used by specifying the target type, or without specifying it. ```{r} ls <- st_linestring(rbind(c(0,0),c(1,1),c(2,1))) @@ -74,7 +74,7 @@ st_cast(sfc, "MULTILINESTRING") sf <- st_sf(a = 5:4, geom = sfc) st_cast(sf, "MULTILINESTRING") ``` -When no target type is given, `st_cast` tries to be smart for two cases: +When no target type is given, `st_cast()` tries to be smart for two cases: 1. if the class of the object is `GEOMETRY`, and all elements are of identical type, and 2. if all elements are length-one `GEOMETRYCOLLECTION` objects, in which case `GEOMETRYCOLLECTION` objects are replaced by their content (which may be a `GEOMETRY` mix again) @@ -129,7 +129,7 @@ plot(cntrd, col = 'red', add = TRUE, cex = .5) ### Getting and setting coordinate reference systems of sf objects The coordinate reference system of objects of class `sf` or `sfc` is -obtained by `st_crs`, and replaced by `st_crs<-`: +obtained by `st_crs()`, and replaced by `st_crs<-`: ```{r} library(sf) geom = st_sfc(st_point(c(0,1)), st_point(c(11,12))) @@ -163,7 +163,7 @@ a missing value, and then set it to the intended value. 
s3 <- s1 %>% st_set_crs(NA) %>% st_set_crs(3857) ``` To carry out a coordinate conversion or transformation, we use -`st_transform` +`st_transform()` ```{r} s3 <- s1 %>% st_transform(3857) s3 @@ -197,20 +197,20 @@ plot(y, border = 'green', add = TRUE) ### Unary operations -`st_is_valid` returns whether polygon geometries are topologically valid: +`st_is_valid()` returns whether polygon geometries are topologically valid: ```{r} b0 = st_polygon(list(rbind(c(-1,-1), c(1,-1), c(1,1), c(-1,1), c(-1,-1)))) b1 = st_polygon(list(rbind(c(-1,-1), c(1,-1), c(1,1), c(0,-1), c(-1,-1)))) st_is_valid(st_sfc(b0,b1)) ``` -and `st_is_simple` whether line geometries are simple: +and `st_is_simple()` whether line geometries are simple: ```{r} s = st_sfc(st_linestring(rbind(c(0,0), c(1,1))), st_linestring(rbind(c(0,0), c(1,1),c(0,1),c(1,0)))) st_is_simple(s) ``` -`st_area` returns the area of polygon geometries, `st_length` the +`st_area()` returns the area of polygon geometries, `st_length()` the length of line geometries: ```{r} st_area(x) @@ -220,12 +220,12 @@ st_length(st_sfc(st_multilinestring(list(rbind(c(0,0),c(1,1),c(1,2))),rbind(c(0, ``` ### Binary operations: distance and relate -`st_distance` computes the shortest distance matrix between geometries; this is +`st_distance()` computes the shortest distance matrix between geometries; this is a dense matrix: ```{r} st_distance(x,y) ``` -`st_relate` returns a dense character matrix with the DE9-IM relationships +`st_relate()` returns a dense character matrix with the DE9-IM relationships between each pair of geometries: ```{r} st_relate(x,y) @@ -248,10 +248,10 @@ geometry sets, dense matrices take up a lot of memory and are mostly filled with `FALSE` values, hence the default is to return a sparse matrix. -`st_intersects` returns for every geometry pair whether they +`st_intersects()` returns for every geometry pair whether they intersect (dense matrix), or which elements intersect (sparse). -Note that the function `st_intersection` in this package returns -a geometry for the intersection instead of logicals as in `st_intersects` (see the next section of this vignette). +Note that `st_intersection()` in this package returns +a geometry for the intersection instead of logicals as in `st_intersects()` (see the next section of this vignette). Other binary predicates include (using sparse for readability): @@ -304,16 +304,16 @@ plot(x) plot(st_centroid(u), add = TRUE, col = 'red') ``` -The intersection of two geometries is the geometry covered by both; it is obtained by `st_intersection`: +The intersection of two geometries is the geometry covered by both; it is obtained by `st_intersection()`: ```{r, fig=TRUE} plot(x) plot(y, add = TRUE) plot(st_intersection(st_union(x),st_union(y)), add = TRUE, col = 'red') ``` -Note that the function `st_intersects` returns a logical matrix indicating whether each geometry pair intersects (see the previous section in this vignette). +Note that `st_intersects()` returns a logical matrix indicating whether each geometry pair intersects (see the previous section in this vignette). 
-To get _everything but_ the intersection, use `st_difference` or `st_sym_difference`: +To get _everything but_ the intersection, use `st_difference()` or `st_sym_difference()`: ```{r,fig=TRUE} par(mfrow=c(2,2), mar = c(0,0,1,0)) plot(x, col = '#ff333388'); @@ -330,7 +330,7 @@ plot(st_sym_difference(st_union(y),st_union(x)), col = 'lightblue', add = TRUE) title("sym_difference(x,y)") ``` -Function `st_segmentize` adds points to straight line sections of a lines or polygon object: +`st_segmentize()` adds points to straight line sections of a lines or polygon object: ```{r,fig=TRUE} par(mfrow=c(1,3),mar=c(1,1,0,0)) pts = rbind(c(0,0),c(1,0),c(2,1),c(3,1)) @@ -347,7 +347,7 @@ plot(pol.seg, col = 'grey') points(pol.seg[[1]]) ``` -Function `st_polygonize` polygonizes a multilinestring, as long as the points form a closed polygon: +`st_polygonize()` polygonizes a multilinestring, as long as the points form a closed polygon: ```{r,fig=TRUE} par(mfrow=c(1,2),mar=c(0,0,1,0)) mls = st_multilinestring(list(matrix(c(0,0,0,1,1,1,0,0),,2,byrow=TRUE))) diff --git a/vignettes/sf4.Rmd b/vignettes/sf4.Rmd index 9c0f485d6..0d0a88e85 100644 --- a/vignettes/sf4.Rmd +++ b/vignettes/sf4.Rmd @@ -60,7 +60,7 @@ Ashe = nc[nc$NAME == "Ashe",] class(Ashe) nc[Ashe,] ``` -We see that in the result set `Ashe` is included, as the default value for argument `op` in `[.sf` is `st_intersects`, and `Ashe` intersects with itself. We could exclude self-intersection by using predicate `st_touches` (overlapping features don't touch): +We see that in the result set `Ashe` is included, as the default value for argument `op` in `[.sf` is `st_intersects()`, and `Ashe` intersects with itself. We could exclude self-intersection by using predicate `st_touches()` (overlapping features don't touch): ```{r} Ashe = nc[nc$NAME == "Ashe",] nc[Ashe, op = st_touches] @@ -82,7 +82,7 @@ plot(st_geometry(Ashe), border = '#ff8888', add = TRUE, lwd = 2) ## Joining two feature sets based on attributes -The usual join verbs of base R (`merge`) and of dplyr (`left_join`, etc) work for `sf` objects as well; the joining takes place on attributes (ignoring geometries). In case of no matching geometry, an empty geometry is substituted. The second argument should be a `data.frame` (or similar), not an `sf` object: +The usual join verbs of base R (`merge`) and of dplyr (`left_join()`, etc) work for `sf` objects as well; the joining takes place on attributes (ignoring geometries). In case of no matching geometry, an empty geometry is substituted. The second argument should be a `data.frame` (or similar), not an `sf` object: ```{r} x = st_sf(a = 1:2, geom = st_sfc(st_point(c(0,0)), st_point(c(1,1)))) @@ -94,7 +94,7 @@ right_join(x, y) ## Joining two feature sets based on geometries -For joining based on spatial intersections (of any kind), `st_join` is used: +For joining based on spatial intersections (of any kind), `st_join()` is used: ```{r fig=TRUE} x = st_sf(a = 1:3, geom = st_sfc(st_point(c(1,1)), st_point(c(2,2)), st_point(c(3,3)))) @@ -114,7 +114,7 @@ st_join(y, x) and the geometry retained is that of the first argument. -The spatial join predicate can be controlled with any function compatible with `st_intersects` (the default), e.g. +The spatial join predicate can be controlled with any function compatible with `st_intersects()` (the default), e.g. 
```{r} st_join(x, y, join = st_covers) # no matching y records: points don't cover circles diff --git a/vignettes/sf5.Rmd b/vignettes/sf5.Rmd index 8b78b488d..ff54c03b5 100644 --- a/vignettes/sf5.Rmd +++ b/vignettes/sf5.Rmd @@ -39,7 +39,7 @@ plot(st_geometry(nc), col = sf.colors(12, categorical = TRUE), border = 'grey', plot(st_geometry(st_centroid(nc)), pch = 3, col = 'red', add = TRUE) ``` -and legends, titles and so on can be added afterwards. `border=NA` removes the polygon borders. +and legends, titles and so on can be added afterwards. `border = NA` removes the polygon borders. As can be seen, the axes plotted are sensitive to the CRS, and in case of longitude/latitude coordinates, degree symbols and orientation are added if `axes = TRUE`. @@ -92,7 +92,7 @@ Color breaks (class intervals) can be controlled by plot arguments `breaks` and plot(nc["AREA"], breaks = c(0,.05,.1,.15,.2,.25)) ``` -or `breaks` is used to indicate a breaks-finding method that is passed as the `style` argument to `classInt::classIntervals`. Its default value, `pretty`, results in rounded class breaks, and has as a side effect that `nbreaks` may be honoured only approximately. Other methods include `"equal"` to break the data range into `"nbreaks"` equal classes, `"quantile"` to use quantiles as class breaks, and `"jenks"`, used in other software. +or `breaks` is used to indicate a breaks-finding method that is passed as the `style` argument to `classInt::classIntervals()`. Its default value, `pretty`, results in rounded class breaks, and has as a side effect that `nbreaks` may be honoured only approximately. Other methods include `"equal"` to break the data range into `"nbreaks"` equal classes, `"quantile"` to use quantiles as class breaks, and `"jenks"`, used in other software. ```{r} plot(nc["AREA"], breaks = "jenks") @@ -116,7 +116,7 @@ plot(st_geometry(eqc), axes = TRUE) # Graticules -Graticules are grid lines along equal longitude (meridians) or latitude (parallels) that, depending on the projection used, often plot as curved lines on a map, giving it reference in terms of longitude and latitude. The `sf` function `st_graticule` tries to create a graticule grid for arbitrary maps. As there are infinitely many projections, there are most likely many cases where it does not succeed in doing this well, and examples of these are welcomed as [sf issues](https://github.com/r-spatial/sf/issues). +Graticules are grid lines along equal longitude (meridians) or latitude (parallels) that, depending on the projection used, often plot as curved lines on a map, giving it reference in terms of longitude and latitude. `sf::st_graticule()` tries to create a graticule grid for arbitrary maps. As there are infinitely many projections, there are most likely many cases where it does not succeed in doing this well, and examples of these are welcomed as [sf issues](https://github.com/r-spatial/sf/issues). The following plot shows a graticule geometry on itself, ```{r} @@ -135,7 +135,7 @@ When we compute the graticule within the plotting function, we know the plotting plot(usa, graticule = TRUE, key.pos = NULL, axes = TRUE) ``` -We can also pass a `crs` object to `graticule` to obtain a graticule in a datum different from the default (WGS84). `st_graticule` takes parameters, and we can pass an object returned by it to the `graticule` parameter of `plot`, to get finer control: +We can also pass a `crs` object to `graticule` to obtain a graticule in a datum different from the default (WGS84). 
`st_graticule()` takes parameters, and we can pass an object returned by it to the `graticule` parameter of `plot`, to get finer control: ```{r} g = st_graticule(usa, lon = seq(-130,-65,5)) plot(usa, graticule = g, key.pos = NULL, axes = TRUE, @@ -149,17 +149,17 @@ which still doesn't look great -- completely controlling the plotting region of ## grid: `st_as_grob` -Package `sf` provides a number of methods for `st_as_grob`: +Package `sf` provides a number of methods for `st_as_grob()`: ```{r} methods(st_as_grob) ``` -which convert simple simple feature objects into `grob` ("graphics objects") objects; `grob`s are the graphic primitives of the `grid` plotting package. These methods can be used by plotting packages that build on `grid`, such as `ggplot2` (which uses them in `geom_sf`) and `tmap`. In addition, `st_viewport` can be used to set up a grid viewport from an `sf` object, with an aspect ratio similar to that of `base::plot.sf`. +which convert simple simple feature objects into `grob` ("graphics objects") objects; `grob`s are the graphic primitives of the `grid` plotting package. These methods can be used by plotting packages that build on `grid`, such as `ggplot2` (which uses them in `geom_sf()`) and `tmap`. In addition, `st_viewport()` can be used to set up a grid viewport from an `sf` object, with an aspect ratio similar to that of `plot.sf()`. ## ggplot2 -contains a geom specially for simple feature objects, with support for graticule white lines in the background using `sf::st_graticule`. Support is currently good for polygons; for lines or points, your mileage may vary. +contains a geom specially for simple feature objects, with support for graticule white lines in the background using `sf::st_graticule()`. Support is currently good for polygons; for lines or points, your mileage may vary. ```{r} library(ggplot2) diff --git a/vignettes/sf6.Rmd b/vignettes/sf6.Rmd index a26d1a6c7..c2df63864 100644 --- a/vignettes/sf6.Rmd +++ b/vignettes/sf6.Rmd @@ -46,7 +46,7 @@ See also `st_axis_order()` `sf` objects can have more than one geometry list-column, but always only one geometry column is considered _active_, -and returned by `st_geometry`. When there are multiple +and returned by `st_geometry()`. When there are multiple geometry columns, the default `print` methods reports which one is active: ```{r} @@ -56,7 +56,7 @@ nc$geom2 = st_centroid(st_geometry(nc)) print(nc, n = 2) ``` -We can switch the active geometry by using `st_geometry<-` or `st_set_geometry`, as in +We can switch the active geometry by using `st_geometry<-` or `st_set_geometry()`, as in ```{r} plot(st_geometry(nc)) st_geometry(nc) <- "geom2" @@ -65,7 +65,7 @@ plot(st_geometry(nc)) # Does `st_simplify` preserve topology? -`st_simplify` is a topology-preserving function, but does this on the +`st_simplify()` is a topology-preserving function, but does this on the level of individual feature geometries. That means, simply said, that after applying it, a polygon will still be a polygon. 
However when two features have a longer shared boundary, applying `st_simplify` diff --git a/vignettes/sf7.Rmd b/vignettes/sf7.Rmd index 06d962da3..e557d0097 100644 --- a/vignettes/sf7.Rmd +++ b/vignettes/sf7.Rmd @@ -132,7 +132,7 @@ Simple feature geometries should obey a ring direction too: exterior rings should be counter clockwise, interior (hole) rings should be clockwise, but in some sense this is obsolete as the difference between exterior ring and interior rings is defined by their position -(exterior, followed by zero or more interior). `sf::read_sf` has an +(exterior, followed by zero or more interior). `sf::read_sf()` has an argument `check_ring_dir` that checks, and corrects, ring directions and many (legacy) datasets have wrong ring directions. With wrong ring directions, many things still work. @@ -296,7 +296,7 @@ summary(as.vector(d1)-as.vector(d2)) # Predicates All unary and binary predicates are available in `s2`, except for -`st_relate` with a pattern. In addition, when using the `s2` predicates, +`st_relate()` with a pattern. In addition, when using the `s2` predicates, depending on the `model`, intersections with neighbours are only reported when `model` is `closed` (the default): ```{r} @@ -309,8 +309,8 @@ st_intersects(nc[1:3,], nc[1:3,], model = "semi-open") # only self-intersections # Transformations -`st_intersection`, `st_union`, `st_difference` and -`st_sym_difference` are available as `s2` equivalents. N-ary +`st_intersection()`, `st_union()`, `st_difference()`, and +`st_sym_difference()` are available as `s2` equivalents. N-ary intersection and difference are not (yet) present; cascaded union is present; unioning by feature does not work with `s2`. @@ -342,8 +342,7 @@ excessive simplification (bottom right). Note that buffers created with s2 _alwa follow s2 cell boundaries, they are never smooth. Hence, choosing a large number for `max_cells` leads to seemingly smooth but, zoomed in, very complex buffers. -To achieve a similar result you could first transform the result and then use -the `st_buffer()` function from `sf`. A simple benchmark shows the +To achieve a similar result you could first transform the result and then use `sf::st_buffer()`. A simple benchmark shows the computational efficiency of the `s2` geometry engine in comparison with transforming and then creating buffers: @@ -385,19 +385,19 @@ low resolution of the input geometry representing the UK. As discussed in the [`sf` issue tracker](https://github.com/r-spatial/sf/issues/1367), deciding on workflows and selecting appropriate levels of level of geographic resolution can be an iterative process. -`st_buffer` as powered by GEOS, for $R^2$ data, are smooth and (nearly) exact. -`st_buffer` as powered by $S^2$ is rougher, complex, non-smooth, and may need tuning. -An common pattern where `st_buffer` is used is this: +`st_buffer()` as powered by GEOS, for $R^2$ data, is smooth and (nearly) exact. +`st_buffer()` as powered by $S^2$ is rougher, complex, non-smooth, and may need tuning. +A common pattern where `st_buffer()` is used is this: * compute buffers around a set of features `x` (points, lines, polygons) -* within each of these buffers, find all occurances of some other spatial +* within each of these buffers, find all occurrences of some other spatial variable `y` and aggregate them (e.g. 
count points, or average a raster variable like precipitation or population density) * work with these aggregated values (discard the buffer) When this is the case, and you are working with geographic coordinates, it may pay off to _not_ compute buffers, but instead -directly work with `st_is_within_distance` to select, for each +directly work with `st_is_within_distance()` to select, for each feature of `x`, all features of `y` that are within a certain distance `d` from `x`. The $S^2$ version of this function uses spatial indexes, so is fast for large datasets. From 89434dc4b98061ec9a4c5307727247108fa03db2 Mon Sep 17 00:00:00 2001 From: olivroy Date: Wed, 17 Jan 2024 18:47:48 -0500 Subject: [PATCH 3/5] docs minor edits --- NEWS.md | 12 ++++++------ R/break_antimeridian.R | 4 ++-- R/crs.R | 2 +- R/datasets.R | 4 ++-- R/geom-predicates.R | 10 +++++----- R/geom-transformers.R | 19 ++++++++++++------- R/plot.R | 2 +- R/proj.R | 17 ++++++++--------- R/read.R | 14 +++++++------- R/sfc.R | 12 ++++++++---- R/sfg.R | 2 +- R/spatstat.R | 2 +- R/stars.R | 2 +- R/tidyverse.R | 24 ++++++++++++------------ R/transform.R | 2 +- R/wkb.R | 4 ++-- man/gdal_addo.Rd | 2 +- man/geos_binary_ops.Rd | 4 ++-- man/geos_binary_pred.Rd | 8 ++++---- man/geos_combine.Rd | 13 +++++++++---- man/nc.Rd | 4 ++-- man/plot.Rd | 2 +- man/proj_tools.Rd | 18 ++++++++---------- man/st.Rd | 2 +- man/st_as_sfc.Rd | 4 ++-- man/st_break_antimeridian.Rd | 4 ++-- man/st_is_longlat.Rd | 2 +- man/st_precision.Rd | 5 ++++- man/st_read.Rd | 10 +++++----- man/st_relate.Rd | 2 +- man/st_transform.Rd | 2 +- man/st_write.Rd | 4 ++-- man/st_zm.Rd | 5 +++-- man/tidyverse.Rd | 26 +++++++++----------------- 34 files changed, 128 insertions(+), 121 deletions(-) diff --git a/NEWS.md b/NEWS.md index 7ea72611a..46e2ea120 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,6 +1,6 @@ # version 1.0-16 -* `st_sample` for polygons is sensitive to setting `oriented = TRUE` to prevent wrongly correcting ring directions; #2308 +* `st_sample()` for polygons is sensitive to setting `oriented = TRUE` to prevent wrongly correcting ring directions; #2308 * add support for the GDAL `footprint` utility (requiring GDAL >= 3.8.0) to `gdal_utils`; #2305, by @goergen95 @@ -40,9 +40,9 @@ * if the env. variable `ADD_SF_NAMESPACE` is set to `true`, `sf` objects get a new attribute, `.sf_namespace`, which forces loading the `sf` namespace when it has not been loaded so far, e.g. 
for proper printing or plotting of an `sf` object; #2212 by Mike Mahoney -* `distinct.sf` is type-safe for `sf` objects with zero rows; #2204 +* `distinct.sf()` is type-safe for `sf` objects with zero rows; #2204 -* `summarise.sf` raises an error if `.by` is given but no `across()` on the geometry; #2207 +* `summarise.sf()` raises an error if `.by` is given but no `across()` on the geometry; #2207 * `st_write()` matches fields on name first, than on position; this matters for formats that have pre-defined names, such as GPX; #2202 @@ -150,7 +150,7 @@ * `sf_project()` accepts 3- or 4-column matrices, containing z and t values; -* optimizations for `st_sfc()` by @paleolimbot; #1938, #1925 +* optimization for `st_sfc()` by @paleolimbot; #1938, #1925 * `[<-.sfc()` recomputes the bounding box; `st_sfc()` gets parameter `compute_bbox`; #1965 @@ -817,7 +817,7 @@ * have `st_graticule` return an empty graticule object when argument `datum` is `NA`; -* export `as_Spatial`, to make it easer for packages to convert `sfc` objects without importing `sf` +* export `as_Spatial`, to make it easier for packages to convert `sfc` objects without importing `sf` * `st_distance` gains a parameter `by_element` to obtain pairwise distances; #437 @@ -1027,7 +1027,7 @@ * add `st_proj_info`, modelled after `rgdal::projInfo` -* overwriting datasets with `st_write` is no longer allowed; update=TRUE appends to them, permitted the driver supports appending. +* overwriting datasets with `st_write()` is no longer allowed; `update=TRUE` appends to them, provided the driver supports appending. * `st_write` gains an argument, `update`, which when `TRUE` will try to append to existing datasets (#204) diff --git a/R/break_antimeridian.R b/R/break_antimeridian.R index 59ff4108b..0845d870b 100644 --- a/R/break_antimeridian.R +++ b/R/break_antimeridian.R @@ -10,10 +10,10 @@ #' the protruding geometries will also be split using the same \code{tol=} #' values; in this case empty geometries will be dropped first. #' -#' @param x object of class sf or sfc +#' @param x object of class `sf` or `sfc` #' @param lon_0 target central longitude (degrees) #' @param tol half of break width (degrees, default 0.0001) -#' @param ... ingnored here +#' @param ... ignored here #' @export #' @name st_break_antimeridian #' @examples diff --git a/R/crs.R b/R/crs.R index 119b10d38..a73bdb158 100644 --- a/R/crs.R +++ b/R/crs.R @@ -241,7 +241,7 @@ st_set_crs = function(x, value) { #' #' Assert whether simple feature coordinates are longlat degrees #' @param x object of class \link{sf} or \link{sfc}, or otherwise an object of a class that has an \link{st_crs} method returning a \code{crs} object -#' @return TRUE if x has geographic coordinates, FALSE if it has projected coordinates, or NA if \code{is.na(st_crs(x))}. +#' @return `TRUE` if `x` has geographic coordinates, `FALSE` if it has projected coordinates, or `NA` if \code{is.na(st_crs(x))}. #' @export st_is_longlat = function(x) { crs = st_crs(x) diff --git a/R/datasets.R b/R/datasets.R index 49843944d..444e05a36 100644 --- a/R/datasets.R +++ b/R/datasets.R @@ -2,8 +2,8 @@ #' #' Sudden Infant Death Syndrome (SIDS) sample data for North Carolina counties, #' two time periods (1974-78 and 1979-84). The details of the columns can be -#' found on the seealso URL, spdep package's vignette. Please note that, -#' though this is basically the same as \code{nc.sids} dataset in spData +#' found in a [spdep package vignette](https://r-spatial.github.io/spdep/articles/sids.html). 
+#' Please note that, though this is basically the same as \code{nc.sids} dataset in spData #' package, \code{nc} only contains a subset of variables. The differences are #' also discussed on the vignette. #' @format A `sf` object diff --git a/R/geom-predicates.R b/R/geom-predicates.R index f2797c040..38c20980f 100644 --- a/R/geom-predicates.R +++ b/R/geom-predicates.R @@ -80,7 +80,7 @@ st_geos_binop = function(op, x, y, par = 0.0, pattern = NA_character_, #' @param x object of class \code{sf}, \code{sfc} or \code{sfg} #' @param y object of class \code{sf}, \code{sfc} or \code{sfg} #' @param pattern character; define the pattern to match to, see details. -#' @param sparse logical; should a sparse matrix be returned (TRUE) or a dense matrix? +#' @param sparse logical; should a sparse matrix be returned (`TRUE`) or a dense matrix? #' @return In case \code{pattern} is not given, \code{st_relate} returns a dense \code{character} matrix; element `[i,j]` has nine characters, referring to the DE9-IM relationship between `x[i]` and `y[j]`, encoded as IxIy,IxBy,IxEy,BxIy,BxBy,BxEy,ExIy,ExBy,ExEy where I refers to interior, B to boundary, and E to exterior, and e.g. BxIy the dimensionality of the intersection of the the boundary of `x[i]` and the interior of `y[j]`, which is one of: 0, 1, 2, or F; digits denoting dimensionality of intersection, F denoting no intersection. When \code{pattern} is given, a dense logical matrix or sparse index list returned with matches to the given pattern; see \link{st_intersection} for a description of the returned matrix or list. See also \url{https://en.wikipedia.org/wiki/DE-9IM} for further explanation. #' @export #' @examples @@ -113,9 +113,9 @@ st_relate = function(x, y, pattern = NA_character_, sparse = !is.na(pattern)) { #' @name geos_binary_pred #' @param x object of class \code{sf}, \code{sfc} or \code{sfg} #' @param y object of class \code{sf}, \code{sfc} or \code{sfg}; if missing, \code{x} is used -#' @param sparse logical; should a sparse index list be returned (TRUE) or a dense logical matrix? See below. +#' @param sparse logical; should a sparse index list be returned (`TRUE`) or a dense logical matrix? See below. #' @inheritDotParams s2::s2_options -#' @param prepared logical; prepare geometry for x, before looping over y? See Details. +#' @param prepared logical; prepare geometry for `x`, before looping over `y`? See Details. #' @details If \code{prepared} is \code{TRUE}, and \code{x} contains POINT geometries and \code{y} contains polygons, then the polygon geometries are prepared, rather than the points. #' @return If \code{sparse=FALSE}, \code{st_predicate} (with \code{predicate} e.g. "intersects") returns a dense logical matrix with element \code{i,j} \code{TRUE} when \code{predicate(x[i], y[j])} (e.g., when geometry of feature i and j intersect); if \code{sparse=TRUE}, an object of class \code{\link{sgbp}} with a sparse list representation of the same matrix, with list element \code{i} an integer vector with all indices j for which \code{predicate(x[i],y[j])} is \code{TRUE} (and hence a zero-length integer vector if none of them is \code{TRUE}). From the dense matrix, one can find out if one or more elements intersect by \code{apply(mat, 1, any)}, and from the sparse list by \code{lengths(lst) > 0}, see examples below. #' @details For most predicates, a spatial index is built on argument \code{x}; see \url{https://r-spatial.org/r/2017/06/22/spatial-index.html}. @@ -218,8 +218,8 @@ st_overlaps = function(x, y, sparse = TRUE, prepared = TRUE, ...) 
st_geos_binop("overlaps", x, y, sparse = sparse, prepared = prepared, ...) #' @name geos_binary_pred -#' @param retain_unique logical; if TRUE (and y is missing) return only indexes of points larger than the current index; this can be used to select unique geometries, see examples. This argument can be used for all geometry predictates; see als \link{distinct.sf} to find records where geometries AND attributes are distinct. -#' @param remove_self logical; if TRUE (and y is missing) return only indexes of geometries different from the current index; this can be used to omit self-intersections; see examples. This argument can be used for all geometry predictates +#' @param retain_unique logical; if `TRUE` (and `y` is missing) return only indexes of points larger than the current index; this can be used to select unique geometries, see examples. This argument can be used for all geometry predicates; see also \link{distinct.sf} to find records where geometries AND attributes are distinct. +#' @param remove_self logical; if `TRUE` (and `y` is missing) return only indexes of geometries different from the current index; this can be used to omit self-intersections; see examples. This argument can be used for all geometry predicates #' @export st_equals = function(x, y, sparse = TRUE, prepared = FALSE, ..., retain_unique = FALSE, remove_self = FALSE) { diff --git a/R/geom-transformers.R b/R/geom-transformers.R index 15101171a..fa92c1f0b 100644 --- a/R/geom-transformers.R +++ b/R/geom-transformers.R @@ -15,7 +15,7 @@ #' @param mitreLimit numeric; limit of extension for a join if \code{joinStyle} 'MITRE' is used (default 1.0, minimum 0.0); see details #' @param singleSide logical; if \code{TRUE}, single-sided buffers are returned for linear geometries, #' in which case negative \code{dist} values give buffers on the right-hand side, positive on the left; see details -#' @param ... passed on to \code{s2_buffer_cells} +#' @param ... passed on to [s2::s2_buffer_cells()] #' @return an object of the same class of \code{x}, with manipulated geometry. #' @export #' @details \code{st_buffer} computes a buffer around this geometry/each geometry. If any of \code{endCapStyle}, @@ -817,7 +817,7 @@ get_first_sfg = function(x) { #' @note To find whether pairs of simple feature geometries intersect, use #' the function \code{\link{st_intersects}} instead of \code{st_intersection}. #' -#' When using GEOS and not using s2 polygons contain their boundary. When using s2 this is determined by the \code{model} defaults of \link[s2]{s2_options}, which can be overriden via the ... argument, e.g. \code{model = "closed"} to force DE-9IM compliant behaviour of polygons (and reproduce GEOS results). +#' When using GEOS and not using s2 polygons contain their boundary. When using s2 this is determined by the \code{model} defaults of \link[s2]{s2_options}, which can be overridden via the ... argument, e.g. \code{model = "closed"} to force DE-9IM compliant behaviour of polygons (and reproduce GEOS results). #' @examples #' set.seed(131) #' library(sf) @@ -899,7 +899,7 @@ st_difference.sfg = function(x, y, ...) #' numbers in the argument to \code{x}; geometries that are empty #' or contained fully inside geometries with higher priority are removed entirely. #' The \code{st_difference.sfc} method with a single argument returns an object with -#' an \code{"idx"} attribute with the orginal index for returned geometries. +#' an \code{"idx"} attribute with the original index for returned geometries. 
st_difference.sfc = function(x, y, ...) { if (missing(y)) { if (isTRUE(st_is_longlat(x))) @@ -978,16 +978,21 @@ st_snap.sf = function(x, y, tolerance) #' @name geos_combine #' @export -#' @param by_feature logical; if TRUE, union each feature if \code{y} is missing or else each pair of features; if FALSE return a single feature that is the geometric union of the set of features in \code{x} if \code{y} is missing, or else the unions of each of the elements of the Cartesian product of both sets -#' @param is_coverage logical; if TRUE, use an optimized algorithm for features that form a polygonal coverage (have no overlaps) +#' @param by_feature logical; if `TRUE`, union each feature if \code{y} is missing or else each pair of features; if `FALSE` return a single feature that is the geometric union of the set of features in \code{x} if \code{y} is missing, or else the unions of each of the elements of the Cartesian product of both sets +#' @param is_coverage logical; if `TRUE`, use an optimized algorithm for features that form a polygonal coverage (have no overlaps) #' @param y object of class \code{sf}, \code{sfc} or \code{sfg} (optional) #' @param ... ignored #' @seealso \link{st_intersection}, \link{st_difference}, \link{st_sym_difference} #' @return If \code{y} is missing, \code{st_union(x)} returns a single geometry with resolved boundaries, else the geometries for all unioned pairs of `x[i]` and `y[j]`. #' @details -#' If \code{st_union} is called with a single argument, \code{x}, (with \code{y} missing) and \code{by_feature} is \code{FALSE} all geometries are unioned together and an \code{sfg} or single-geometry \code{sfc} object is returned. If \code{by_feature} is \code{TRUE} each feature geometry is unioned individually. This can for instance be used to resolve internal boundaries after polygons were combined using \code{st_combine}. If \code{y} is provided, all elements of \code{x} and \code{y} are unioned, pairwise if \code{by_feature} is TRUE, or else as the Cartesian product of both sets. +#' If \code{st_union} is called with a single argument, \code{x}, (with \code{y} missing) and \code{by_feature} is \code{FALSE} all geometries are unioned together and an \code{sfg} or single-geometry \code{sfc} object is returned. +#' If \code{by_feature} is \code{TRUE} each feature geometry is unioned individually. +#' This can for instance be used to resolve internal boundaries after polygons were combined using \code{st_combine}. +#' If \code{y} is provided, all elements of \code{x} and \code{y} are unioned, pairwise if \code{by_feature} is TRUE, or else as the Cartesian product of both sets. #' -#' Unioning a set of overlapping polygons has the effect of merging the areas (i.e. the same effect as iteratively unioning all individual polygons together). Unioning a set of LineStrings has the effect of fully noding and dissolving the input linework. In this context "fully noded" means that there will be a node or endpoint in the output for every endpoint or line segment crossing in the input. "Dissolved" means that any duplicate (e.g. coincident) line segments or portions of line segments will be reduced to a single line segment in the output. Unioning a set of Points has the effect of merging all identical points (producing a set with no duplicates). +#' Unioning a set of overlapping polygons has the effect of merging the areas (i.e. the same effect as iteratively unioning all individual polygons together). 
+#' Unioning a set of LineStrings has the effect of fully noding and dissolving the input linework. In this context "fully noded" means that there will be a node or endpoint in the output for every endpoint or line segment crossing in the input. +#' "Dissolved" means that any duplicate (e.g. coincident) line segments or portions of line segments will be reduced to a single line segment in the output. Unioning a set of Points has the effect of merging all identical points (producing a set with no duplicates). #' @examples #' plot(st_union(nc)) st_union = function(x, y, ..., by_feature = FALSE, is_coverage = FALSE) UseMethod("st_union") diff --git a/R/plot.R b/R/plot.R index ebca1236a..6daf760d1 100644 --- a/R/plot.R +++ b/R/plot.R @@ -29,7 +29,7 @@ kw_dflt = function(x, key.pos) { #' @param pal palette function, similar to \link{rainbow}, or palette values; if omitted, \code{sf.colors} is used #' @param nbreaks number of colors breaks (ignored for \code{factor} or \code{character} variables) #' @param breaks either a numeric vector with the actual breaks, or a name of a method accepted by the \code{style} argument of \link[classInt]{classIntervals} -#' @param max.plot integer; lower boundary to maximum number of attributes to plot; the default value (9) can be overriden by setting the global option \code{sf_max.plot}, e.g. \code{options(sf_max.plot=2)} +#' @param max.plot integer; lower boundary to maximum number of attributes to plot; the default value (9) can be overridden by setting the global option \code{sf_max.plot}, e.g. \code{options(sf_max.plot=2)} #' @param key.pos numeric; side to plot a color key: 1 bottom, 2 left, 3 top, 4 right; set to \code{NULL} to omit key completely, 0 to only not plot the key, or -1 to select automatically. If multiple columns are plotted in a single function call by default no key is plotted and every submap is stretched individually; if a key is requested (and \code{col} is missing) all maps are colored according to a single key. Auto select depends on plot size, map aspect, and, if set, parameter \code{asp}. If it has lenght 2, the second value, ranging from 0 to 1, determines where the key is placed in the available space (default: 0.5, center). #' @param key.width amount of space reserved for the key (incl. 
labels), thickness/width of the scale bar #' @param key.length amount of space reserved for the key along its axis, length of the scale bar diff --git a/R/proj.R b/R/proj.R index 3b9a3acd0..cadbb7052 100644 --- a/R/proj.R +++ b/R/proj.R @@ -83,7 +83,7 @@ sf_project = function(from = character(0), to = character(0), pts, keep = FALSE, #' #' Query or manage PROJ search path and network settings #' @param paths the search path to be set; omit if paths need to be queried -#' @param with_proj logical; if `NA` set for both GDAL and PROJ, otherwise set either for PROJ (TRUE) or GDAL (FALSE) +#' @param with_proj logical; if `NA` set for both GDAL and PROJ, otherwise set either for PROJ (`TRUE`) or GDAL (`FALSE`) #' @return `sf_proj_search_paths()` returns the search path (possibly after setting it) #' @name proj_tools #' @export @@ -103,7 +103,7 @@ sf_proj_search_paths = function(paths = character(0), with_proj = NA) { } } -#' @param enable logical; set this to enable (TRUE) or disable (FALSE) the proj network search facility +#' @param enable logical; set this to enable (`TRUE`) or disable (`FALSE`) the proj network search facility #' @param url character; use this to specify and override the default proj network CDN #' @return `sf_proj_network` when called without arguments returns a logical indicating whether #' network search of datum grids is enabled, when called with arguments it returns a character @@ -117,8 +117,7 @@ sf_proj_network = function(enable = FALSE, url = character(0)) { CPL_enable_network(url, enable) } -#' @param source_crs object of class `crs` or character -#' @param target_crs object of class `crs` or character +#' @param source_crs,target_crs object of class `crs` or character #' @param authority character; constrain output pipelines to those of authority #' @param AOI length four numeric; desired area of interest for the resulting #' coordinate transformations (west, south, east, north, in degrees). @@ -133,15 +132,15 @@ sf_proj_network = function(enable = FALSE, url = character(0)) { #' registered in the grid_alternatives table of its database) were available. Used typically when #' networking is enabled.) #' @param desired_accuracy numeric; only return pipelines with at least this accuracy -#' @param strict_containment logical; default FALSE; permit partial matching of the area -#' of interest; if TRUE strictly contain the area of interest. +#' @param strict_containment logical; default `FALSE`; permit partial matching of the area +#' of interest; if `TRUE` strictly contain the area of interest. #' The area of interest is either as given in AOI, or as implied by the #' source/target coordinate reference systems -#' @param axis_order_authority_compliant logical; if FALSE always +#' @param axis_order_authority_compliant logical; if `FALSE` always #' choose ‘x’ or longitude for the first #' axis; if TRUE, follow the axis orders given by the coordinate reference systems when -#' constructing the for the first axis; if FALSE, follow the axis orders given by -#' @return `sf_proj_pipelines` returns a table with candidate coordinate transformation +#' constructing the for the first axis; if `FALSE`, follow the axis orders given by +#' @return `sf_proj_pipelines()` returns a table with candidate coordinate transformation #' pipelines along with their accuracy; `NA` accuracy indicates ballpark accuracy. 
#' @name proj_tools #' @export diff --git a/R/read.R b/R/read.R index 949c75d48..93a73fdb7 100644 --- a/R/read.R +++ b/R/read.R @@ -37,7 +37,7 @@ set_utf8 = function(x) { #' @param options character; driver dependent dataset open options, multiple #' options supported. For possible values, see the "Open options" section #' of the GDAL documentation of the corresponding driver, and -#' https://github.com/r-spatial/sf/issues/1157 for an example. +#' for an example. #' @param quiet logical; suppress info on name, driver, size and spatial #' reference, or signaling no or multiple layers #' @param geometry_column integer or character; in case of multiple geometry @@ -54,10 +54,10 @@ set_utf8 = function(x) { #' converted to factors? Default for \code{read_sf} or R version >= 4.1.0 is #' \code{FALSE}, for \code{st_read} and R version < 4.1.0 equal to #' \code{default.stringsAsFactors()} -#' @param int64_as_string logical; if TRUE, Int64 attributes are returned as -#' string; if FALSE, they are returned as double and a warning is given when +#' @param int64_as_string logical; if `TRUE`, Int64 attributes are returned as +#' string; if `FALSE`, they are returned as double and a warning is given when #' precision is lost (i.e., values are larger than 2^53). -#' @param check_ring_dir logical; if TRUE, polygon ring directions are checked +#' @param check_ring_dir logical; if `TRUE`, polygon ring directions are checked #' and if necessary corrected (when seen from above: exterior ring counter #' clockwise, holes clockwise) #' @details for \code{geometry_column}, see also @@ -269,7 +269,7 @@ process_cpl_read_ogr_stream = function(x, geom_column_info, num_features, fid_co #' @param drivers character; limited set of driver short names to be tried (default: try all) #' @param wkt_filter character; WKT representation of a spatial filter (may be used as bounding box, selecting overlapping geometries); see examples #' @param optional logical; passed to \link[base]{as.data.frame}; always \code{TRUE} when \code{as_tibble} is \code{TRUE} -#' @param use_stream Use TRUE to use the experimental columnar interface introduced in GDAL 3.6. +#' @param use_stream Use `TRUE` to use the experimental columnar interface introduced in GDAL 3.6. #' @note The use of \code{system.file} in examples make sure that examples run regardless where R is installed: #' typical users will not use \code{system.file} but give the file name directly, either with full path or relative #' to the current working directory (see \link{getwd}). "Shapefiles" consist of several files with the same basename @@ -826,8 +826,8 @@ check_append_delete <- function(append, delete) { #' @name st_write #' @export -#' @details st_delete deletes layer(s) in a data source, or a data source if layers are -#' omitted; it returns TRUE on success, FALSE on failure, invisibly. +#' @details `st_delete()` deletes layer(s) in a data source, or a data source if layers are +#' omitted; it returns `TRUE` on success, `FALSE` on failure, invisibly. st_delete = function(dsn, layer = character(0), driver = guess_driver_can_write(dsn), quiet = FALSE) { invisible(CPL_delete_ogr(dsn, layer, driver, quiet) == 0) } diff --git a/R/sfc.R b/R/sfc.R index 2ff1a013e..277be96db 100644 --- a/R/sfc.R +++ b/R/sfc.R @@ -324,9 +324,10 @@ st_geometry_type = function(x, by_geometry = TRUE) { #' Drop Z and/or M dimensions from feature geometries, resetting classes appropriately #' @param x object of class \code{sfg}, \code{sfc} or \code{sf} #' @param ... 
ignored
-#' @param drop logical; drop, or (FALSE) add?
+#' @param drop logical; drop, or (`FALSE`) add?
#' @param what character which dimensions to drop or add
-#' @details Only combinations \code{drop=TRUE}, \code{what = "ZM"}, and \code{drop=FALSE}, \code{what="Z"} are supported so far. In case \code{add=TRUE}, \code{x} should have \code{XY} geometry, and zero values are added for \code{Z}.
+#' @details Only combinations \code{drop=TRUE}, \code{what = "ZM"}, and \code{drop=FALSE}, \code{what="Z"} are supported so far.
+#' In case \code{drop=FALSE}, \code{x} should have \code{XY} geometry, and zero values are added for \code{Z}.
#' @examples
#' st_zm(st_linestring(matrix(1:32,8)))
#' x = st_sfc(st_linestring(matrix(1:32,8)), st_linestring(matrix(1:8,2)))
@@ -406,11 +407,14 @@ st_precision.sfc <- function(x) {
#' Set precision
#'
-#' @name st_precision
+#' @rdname st_precision
#' @param precision numeric, or object of class \code{units} with distance units (but see details); see \link{st_as_binary} for how to do this.
#' @details If \code{precision} is a \code{units} object, the object on which we set precision must have a coordinate reference system with compatible distance units.
#'
-#' Setting a \code{precision} has no direct effect on coordinates of geometries, but merely set an attribute tag to an \code{sfc} object. The effect takes place in \link{st_as_binary} or, more precise, in the C++ function \code{CPL_write_wkb}, where simple feature geometries are being serialized to well-known-binary (WKB). This happens always when routines are called in GEOS library (geometrical operations or predicates), for writing geometries using \link{st_write} or \link{write_sf}, \code{st_make_valid} in package \code{lwgeom}; also \link{aggregate} and \link{summarise} by default union geometries, which calls a GEOS library function. Routines in these libraries receive rounded coordinates, and possibly return results based on them. \link{st_as_binary} contains an example of a roundtrip of \code{sfc} geometries through WKB, in order to see the rounding happening to R data.
+#' Setting a \code{precision} has no direct effect on coordinates of geometries, but merely sets an attribute tag on an \code{sfc} object.
+#' The effect takes place in \link{st_as_binary} or, more precisely, in the C++ function \code{CPL_write_wkb}, where simple feature geometries are being serialized to well-known-binary (WKB).
+#' This always happens when routines in the GEOS library are called (geometrical operations or predicates), when writing geometries using \link{st_write} or \link{write_sf}, and in \code{st_make_valid} in package \code{lwgeom}; also \link{aggregate} and \link{summarise} by default union geometries, which calls a GEOS library function.
+#' Routines in these libraries receive rounded coordinates, and possibly return results based on them. \link{st_as_binary} contains an example of a roundtrip of \code{sfc} geometries through WKB, in order to see the rounding happening to R data.
#'
#' The reason to support precision is that geometrical operations in GEOS or liblwgeom may work better at reduced precision. For writing data from R to external resources it is harder to think of a good reason to limit precision.
#'
diff --git a/R/sfg.R b/R/sfg.R
index 5649a94d8..de77e3f72 100644
--- a/R/sfg.R
+++ b/R/sfg.R
@@ -235,7 +235,7 @@ format.sfg = function(x, ..., width = 30) {
#' @name st
#' @param ...
objects to be pasted together into a single simple feature #' @param recursive logical; ignored -#' @param flatten logical; if TRUE, try to simplify results; if FALSE, return geometrycollection containing all objects +#' @param flatten logical; if `TRUE`, try to simplify results; if `FALSE`, return geometrycollection containing all objects #' @examples #' c(st_point(1:2), st_point(5:6)) #' c(st_point(1:2), st_multipoint(matrix(5:8,2))) diff --git a/R/spatstat.R b/R/spatstat.R index 2fed6ba29..8b8127d0e 100644 --- a/R/spatstat.R +++ b/R/spatstat.R @@ -1,4 +1,4 @@ -# window_polygons_from_edges = function (w) { +# window_polygons_from_edges = function(w) { # mw = as.matrix(w$ends) # lst1 = lapply(seq_len(NROW(mw)), function(i) st_linestring(matrix(mw[i,], 2, byrow = TRUE))) # p0 = st_polygonize(do.call(c, do.call(st_sfc, lst1))) diff --git a/R/stars.R b/R/stars.R index 75a0ab46f..87113e9bd 100644 --- a/R/stars.R +++ b/R/stars.R @@ -388,7 +388,7 @@ gdal_create = function(f, nxy, values, crs, xlim, ylim) { CPL_create(as.character(f), as.integer(nxy), as.double(values), crs$wkt, as.double(xlim), as.double(ylim)) } -#' add or remove overviews to/from a raster image +#' Add or remove overviews to/from a raster image #' #' add or remove overviews to/from a raster image #' @param file character; file name diff --git a/R/tidyverse.R b/R/tidyverse.R index c3041841d..66bd36f80 100644 --- a/R/tidyverse.R +++ b/R/tidyverse.R @@ -23,16 +23,6 @@ dplyr_reconstruct.sf = function(data, template) { ) } -#' @name tidyverse -group_split.sf <- function(.tbl, ..., .keep = TRUE) { - class(.tbl) = setdiff(class(.tbl), "sf") - if (inherits(.tbl, "rowwise_df")) { - lapply(dplyr::group_split(.tbl, ...), st_as_sf) - } else { - lapply(dplyr::group_split(.tbl, ..., .keep = .keep), st_as_sf) - } -} - #' Tidyverse methods for sf objects (remove .sf suffix!) #' #' Tidyverse methods for sf objects. Geometries are sticky, use \link{as.data.frame} to let \code{dplyr}'s own methods drop them. Use these methods without the .sf suffix and after loading the tidyverse package with the generic (or after loading package tidyverse). @@ -562,7 +552,17 @@ sample_n.sf <- function(tbl, size, replace = FALSE, weight = NULL, .env = parent sample_frac.sf <- function(tbl, size = 1, replace = FALSE, weight = NULL, .env = parent.frame()) { st_sf(NextMethod(), sf_column_name = attr(tbl, "sf_column")) } - +#' @name tidyverse +#' @param .tbl see original function docs +#' @param .keep see original function docs +group_split.sf <- function(.tbl, ..., .keep = TRUE) { + class(.tbl) = setdiff(class(.tbl), "sf") + if (inherits(.tbl, "rowwise_df")) { + lapply(dplyr::group_split(.tbl, ...), st_as_sf) + } else { + lapply(dplyr::group_split(.tbl, ..., .keep = .keep), st_as_sf) + } +} #' @name tidyverse #' @examples #' if (require(tidyr, quietly = TRUE) && require(dplyr, quietly = TRUE)) { @@ -574,7 +574,7 @@ sample_frac.sf <- function(tbl, size = 1, replace = FALSE, weight = NULL, .env = #' plot(trs.sf["year"], axes = TRUE) #' } #' @details \code{nest} assumes that a simple feature geometry list-column was among the columns that were nested. -nest.sf = function (.data, ...) { +nest.sf = function(.data, ...) 
{ if (!requireNamespace("rlang", quietly = TRUE)) stop("rlang required: install first?") diff --git a/R/transform.R b/R/transform.R index 4eb93f06c..e376d0dd0 100644 --- a/R/transform.R +++ b/R/transform.R @@ -71,7 +71,7 @@ st_can_transform = function(src, dst) { #' ambiguity in the axis order of the specified coordinate reference system; #' if you need the traditional GIS order, use \code{"OGC:CRS84"}, not #' \code{"EPSG:4326"}. Extra care is needed with the ESRI Shapefile format, -#' because WKT1 does not store axis order unambigiously. +#' because WKT1 does not store axis order unambiguously. #' #' @seealso \link[lwgeom]{st_transform_proj}, part of package lwgeom. #' diff --git a/R/wkb.R b/R/wkb.R index 038c52156..16582ab0d 100644 --- a/R/wkb.R +++ b/R/wkb.R @@ -22,9 +22,9 @@ skip0x = function(x) { } #' @name st_as_sfc -#' @param EWKB logical; if TRUE, parse as EWKB (extended WKB; PostGIS: ST_AsEWKB), otherwise as ISO WKB (PostGIS: ST_AsBinary) +#' @param EWKB logical; if `TRUE`, parse as EWKB (extended WKB; PostGIS: ST_AsEWKB), otherwise as ISO WKB (PostGIS: ST_AsBinary) #' @param spatialite logical; if \code{TRUE}, WKB is assumed to be in the spatialite dialect, see \url{https://www.gaia-gis.it/gaia-sins/BLOB-Geometry.html}; this is only supported in native endian-ness (i.e., files written on system with the same endian-ness as that on which it is being read). -#' @param pureR logical; if TRUE, use only R code, if FALSE, use compiled (C++) code; use TRUE when the endian-ness of the binary differs from the host machine (\code{.Platform$endian}). +#' @param pureR logical; if `TRUE`, use only R code, if `FALSE`, use compiled (C++) code; use `TRUE` when the endian-ness of the binary differs from the host machine (\code{.Platform$endian}). #' @details When converting from WKB, the object \code{x} is either a character vector such as typically obtained from PostGIS (either with leading "0x" or without), or a list with raw vectors representing the features in binary (raw) form. #' @examples #' wkb = structure(list("01010000204071000000000000801A064100000000AC5C1441"), class = "WKB") diff --git a/man/gdal_addo.Rd b/man/gdal_addo.Rd index 6a2e9e001..dbad4c003 100644 --- a/man/gdal_addo.Rd +++ b/man/gdal_addo.Rd @@ -2,7 +2,7 @@ % Please edit documentation in R/stars.R \name{gdal_addo} \alias{gdal_addo} -\title{add or remove overviews to/from a raster image} +\title{Add or remove overviews to/from a raster image} \usage{ gdal_addo( file, diff --git a/man/geos_binary_ops.Rd b/man/geos_binary_ops.Rd index af744b891..754a182dd 100644 --- a/man/geos_binary_ops.Rd +++ b/man/geos_binary_ops.Rd @@ -53,7 +53,7 @@ overlapping areas are erased from geometries that are indexed at greater numbers in the argument to \code{x}; geometries that are empty or contained fully inside geometries with higher priority are removed entirely. The \code{st_difference.sfc} method with a single argument returns an object with -an \code{"idx"} attribute with the orginal index for returned geometries. +an \code{"idx"} attribute with the original index for returned geometries. \code{st_snap} snaps the vertices and segments of a geometry to another geometry's vertices. If \code{y} contains more than one geometry, its geometries are merged into a collection before snapping to that collection. @@ -63,7 +63,7 @@ an \code{"idx"} attribute with the orginal index for returned geometries. To find whether pairs of simple feature geometries intersect, use the function \code{\link{st_intersects}} instead of \code{st_intersection}. 
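The distinction drawn above between the predicate and the geometric operation can be sketched as follows (the two overlapping squares are invented for illustration):

    library(sf)
    a <- st_as_sfc("POLYGON ((0 0, 2 0, 2 2, 0 2, 0 0))")
    b <- st_as_sfc("POLYGON ((1 1, 3 1, 3 3, 1 3, 1 1))")
    st_intersects(a, b, sparse = FALSE)  # logical matrix: do the geometries intersect?
    st_intersection(a, b)                # sfc holding the overlapping unit square (1 1)-(2 2)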
-When using GEOS and not using s2 polygons contain their boundary. When using s2 this is determined by the \code{model} defaults of \link[s2]{s2_options}, which can be overriden via the ... argument, e.g. \code{model = "closed"} to force DE-9IM compliant behaviour of polygons (and reproduce GEOS results). +When using GEOS and not using s2 polygons contain their boundary. When using s2 this is determined by the \code{model} defaults of \link[s2]{s2_options}, which can be overridden via the ... argument, e.g. \code{model = "closed"} to force DE-9IM compliant behaviour of polygons (and reproduce GEOS results). } \examples{ set.seed(131) diff --git a/man/geos_binary_pred.Rd b/man/geos_binary_pred.Rd index 84ff2148e..f41859698 100644 --- a/man/geos_binary_pred.Rd +++ b/man/geos_binary_pred.Rd @@ -56,7 +56,7 @@ st_is_within_distance(x, y = x, dist, sparse = TRUE, ...) \item{y}{object of class \code{sf}, \code{sfc} or \code{sfg}; if missing, \code{x} is used} -\item{sparse}{logical; should a sparse index list be returned (TRUE) or a dense logical matrix? See below.} +\item{sparse}{logical; should a sparse index list be returned (\code{TRUE}) or a dense logical matrix? See below.} \item{...}{ Arguments passed on to \code{\link[s2:s2_options]{s2::s2_options}} @@ -88,14 +88,14 @@ that can used to constrain the output of \code{\link[s2:s2_rebuild]{s2_rebuild() boolean operation.} }} -\item{prepared}{logical; prepare geometry for x, before looping over y? See Details.} +\item{prepared}{logical; prepare geometry for \code{x}, before looping over \code{y}? See Details.} \item{model}{character; polygon/polyline model; one of "open", "semi-open" or "closed"; see Details.} -\item{retain_unique}{logical; if TRUE (and y is missing) return only indexes of points larger than the current index; this can be used to select unique geometries, see examples. This argument can be used for all geometry predictates; see als \link{distinct.sf} to find records where geometries AND attributes are distinct.} +\item{retain_unique}{logical; if \code{TRUE} (and \code{y} is missing) return only indexes of points larger than the current index; this can be used to select unique geometries, see examples. This argument can be used for all geometry predicates; see also \link{distinct.sf} to find records where geometries AND attributes are distinct.} -\item{remove_self}{logical; if TRUE (and y is missing) return only indexes of geometries different from the current index; this can be used to omit self-intersections; see examples. This argument can be used for all geometry predictates} +\item{remove_self}{logical; if \code{TRUE} (and \code{y} is missing) return only indexes of geometries different from the current index; this can be used to omit self-intersections; see examples. 
This argument can be used for all geometry predicates}
\item{par}{numeric; parameter used for "equals_exact" (margin);}
diff --git a/man/geos_combine.Rd b/man/geos_combine.Rd
index 7fe6e7d2e..716b26d40 100644
--- a/man/geos_combine.Rd
+++ b/man/geos_combine.Rd
@@ -17,9 +17,9 @@ st_union(x, y, ..., by_feature = FALSE, is_coverage = FALSE)
\item{...}{ignored}
-\item{by_feature}{logical; if TRUE, union each feature if \code{y} is missing or else each pair of features; if FALSE return a single feature that is the geometric union of the set of features in \code{x} if \code{y} is missing, or else the unions of each of the elements of the Cartesian product of both sets}
+\item{by_feature}{logical; if \code{TRUE}, union each feature if \code{y} is missing or else each pair of features; if \code{FALSE} return a single feature that is the geometric union of the set of features in \code{x} if \code{y} is missing, or else the unions of each of the elements of the Cartesian product of both sets}
-\item{is_coverage}{logical; if TRUE, use an optimized algorithm for features that form a polygonal coverage (have no overlaps)}
+\item{is_coverage}{logical; if \code{TRUE}, use an optimized algorithm for features that form a polygonal coverage (have no overlaps)}
}
\value{
\code{st_combine} returns a single, combined geometry, with no resolved boundaries; returned geometries may well be invalid.
@@ -32,9 +32,14 @@ Combine several feature geometries into one, without unioning or resolving inter
\details{
\code{st_combine} combines geometries without resolving borders, using \link{c.sfg} (analogous to \link[base]{c} for ordinary vectors).
-If \code{st_union} is called with a single argument, \code{x}, (with \code{y} missing) and \code{by_feature} is \code{FALSE} all geometries are unioned together and an \code{sfg} or single-geometry \code{sfc} object is returned. If \code{by_feature} is \code{TRUE} each feature geometry is unioned individually. This can for instance be used to resolve internal boundaries after polygons were combined using \code{st_combine}. If \code{y} is provided, all elements of \code{x} and \code{y} are unioned, pairwise if \code{by_feature} is TRUE, or else as the Cartesian product of both sets.
+If \code{st_union} is called with a single argument, \code{x}, (with \code{y} missing) and \code{by_feature} is \code{FALSE} all geometries are unioned together and an \code{sfg} or single-geometry \code{sfc} object is returned.
+If \code{by_feature} is \code{TRUE} each feature geometry is unioned individually.
+This can for instance be used to resolve internal boundaries after polygons were combined using \code{st_combine}.
+If \code{y} is provided, all elements of \code{x} and \code{y} are unioned, pairwise if \code{by_feature} is \code{TRUE}, or else as the Cartesian product of both sets.
-Unioning a set of overlapping polygons has the effect of merging the areas (i.e. the same effect as iteratively unioning all individual polygons together). Unioning a set of LineStrings has the effect of fully noding and dissolving the input linework. In this context "fully noded" means that there will be a node or endpoint in the output for every endpoint or line segment crossing in the input. "Dissolved" means that any duplicate (e.g. coincident) line segments or portions of line segments will be reduced to a single line segment in the output. Unioning a set of Points has the effect of merging all identical points (producing a set with no duplicates).
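A small sketch of the noding and dissolving behaviour described in the paragraph above (coordinates invented; the exact form of the output depends on the GEOS version):

    library(sf)
    l1 <- st_linestring(rbind(c(0, 0), c(2, 2)))
    l2 <- st_linestring(rbind(c(0, 2), c(2, 0)))
    st_union(st_sfc(l1, l2))  # crossing lines are noded at (1, 1); expect a MULTILINESTRING of four segments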
+Unioning a set of overlapping polygons has the effect of merging the areas (i.e. the same effect as iteratively unioning all individual polygons together). +Unioning a set of LineStrings has the effect of fully noding and dissolving the input linework. In this context "fully noded" means that there will be a node or endpoint in the output for every endpoint or line segment crossing in the input. +"Dissolved" means that any duplicate (e.g. coincident) line segments or portions of line segments will be reduced to a single line segment in the output. Unioning a set of Points has the effect of merging all identical points (producing a set with no duplicates). } \examples{ nc = st_read(system.file("shape/nc.shp", package="sf")) diff --git a/man/nc.Rd b/man/nc.Rd index 1b527acb6..e35c62751 100644 --- a/man/nc.Rd +++ b/man/nc.Rd @@ -10,8 +10,8 @@ A \code{sf} object \description{ Sudden Infant Death Syndrome (SIDS) sample data for North Carolina counties, two time periods (1974-78 and 1979-84). The details of the columns can be -found on the seealso URL, spdep package's vignette. Please note that, -though this is basically the same as \code{nc.sids} dataset in spData +found in a \href{https://r-spatial.github.io/spdep/articles/sids.html}{spdep packages vignette}. +Please note that, though this is basically the same as \code{nc.sids} dataset in spData package, \code{nc} only contains a subset of variables. The differences are also discussed on the vignette. } diff --git a/man/plot.Rd b/man/plot.Rd index f34449fca..637513a51 100644 --- a/man/plot.Rd +++ b/man/plot.Rd @@ -169,7 +169,7 @@ sf.colors(n = 10, cutoff.tails = c(0.35, 0.2), alpha = 1, categorical = FALSE) \item{breaks}{either a numeric vector with the actual breaks, or a name of a method accepted by the \code{style} argument of \link[classInt]{classIntervals}} -\item{max.plot}{integer; lower boundary to maximum number of attributes to plot; the default value (9) can be overriden by setting the global option \code{sf_max.plot}, e.g. \code{options(sf_max.plot=2)}} +\item{max.plot}{integer; lower boundary to maximum number of attributes to plot; the default value (9) can be overridden by setting the global option \code{sf_max.plot}, e.g. \code{options(sf_max.plot=2)}} \item{key.pos}{numeric; side to plot a color key: 1 bottom, 2 left, 3 top, 4 right; set to \code{NULL} to omit key completely, 0 to only not plot the key, or -1 to select automatically. If multiple columns are plotted in a single function call by default no key is plotted and every submap is stretched individually; if a key is requested (and \code{col} is missing) all maps are colored according to a single key. Auto select depends on plot size, map aspect, and, if set, parameter \code{asp}. 
If it has lenght 2, the second value, ranging from 0 to 1, determines where the key is placed in the available space (default: 0.5, center).} diff --git a/man/proj_tools.Rd b/man/proj_tools.Rd index 26454aa55..d7723d841 100644 --- a/man/proj_tools.Rd +++ b/man/proj_tools.Rd @@ -26,15 +26,13 @@ sf_proj_pipelines( \arguments{ \item{paths}{the search path to be set; omit if paths need to be queried} -\item{with_proj}{logical; if \code{NA} set for both GDAL and PROJ, otherwise set either for PROJ (TRUE) or GDAL (FALSE)} +\item{with_proj}{logical; if \code{NA} set for both GDAL and PROJ, otherwise set either for PROJ (\code{TRUE}) or GDAL (\code{FALSE})} -\item{enable}{logical; set this to enable (TRUE) or disable (FALSE) the proj network search facility} +\item{enable}{logical; set this to enable (\code{TRUE}) or disable (\code{FALSE}) the proj network search facility} \item{url}{character; use this to specify and override the default proj network CDN} -\item{source_crs}{object of class \code{crs} or character} - -\item{target_crs}{object of class \code{crs} or character} +\item{source_crs, target_crs}{object of class \code{crs} or character} \item{authority}{character; constrain output pipelines to those of authority} @@ -55,15 +53,15 @@ networking is enabled.)} \item{desired_accuracy}{numeric; only return pipelines with at least this accuracy} -\item{strict_containment}{logical; default FALSE; permit partial matching of the area -of interest; if TRUE strictly contain the area of interest. +\item{strict_containment}{logical; default \code{FALSE}; permit partial matching of the area +of interest; if \code{TRUE} strictly contain the area of interest. The area of interest is either as given in AOI, or as implied by the source/target coordinate reference systems} -\item{axis_order_authority_compliant}{logical; if FALSE always +\item{axis_order_authority_compliant}{logical; if \code{FALSE} always choose ‘x’ or longitude for the first axis; if TRUE, follow the axis orders given by the coordinate reference systems when -constructing the for the first axis; if FALSE, follow the axis orders given by} +constructing the for the first axis; if \code{FALSE}, follow the axis orders given by} } \value{ \code{sf_proj_search_paths()} returns the search path (possibly after setting it) @@ -72,7 +70,7 @@ constructing the for the first axis; if FALSE, follow the axis orders given by} network search of datum grids is enabled, when called with arguments it returns a character vector with the URL of the CDN used (or specified with \code{url}). -\code{sf_proj_pipelines} returns a table with candidate coordinate transformation +\code{sf_proj_pipelines()} returns a table with candidate coordinate transformation pipelines along with their accuracy; \code{NA} accuracy indicates ballpark accuracy. } \description{ diff --git a/man/st.Rd b/man/st.Rd index f40e0da1e..f2e8d9f62 100644 --- a/man/st.Rd +++ b/man/st.Rd @@ -55,7 +55,7 @@ st_geometrycollection(x = list(), dims = "XY") \item{recursive}{logical; ignored} -\item{flatten}{logical; if TRUE, try to simplify results; if FALSE, return geometrycollection containing all objects} +\item{flatten}{logical; if \code{TRUE}, try to simplify results; if \code{FALSE}, return geometrycollection containing all objects} } \value{ object of the same nature as \code{x}, but with appropriate class attribute set diff --git a/man/st_as_sfc.Rd b/man/st_as_sfc.Rd index 87da48250..6b71a53b3 100644 --- a/man/st_as_sfc.Rd +++ b/man/st_as_sfc.Rd @@ -76,11 +76,11 @@ st_as_sfc(x, ...) 
\item{...}{further arguments} -\item{EWKB}{logical; if TRUE, parse as EWKB (extended WKB; PostGIS: ST_AsEWKB), otherwise as ISO WKB (PostGIS: ST_AsBinary)} +\item{EWKB}{logical; if \code{TRUE}, parse as EWKB (extended WKB; PostGIS: ST_AsEWKB), otherwise as ISO WKB (PostGIS: ST_AsBinary)} \item{spatialite}{logical; if \code{TRUE}, WKB is assumed to be in the spatialite dialect, see \url{https://www.gaia-gis.it/gaia-sins/BLOB-Geometry.html}; this is only supported in native endian-ness (i.e., files written on system with the same endian-ness as that on which it is being read).} -\item{pureR}{logical; if TRUE, use only R code, if FALSE, use compiled (C++) code; use TRUE when the endian-ness of the binary differs from the host machine (\code{.Platform$endian}).} +\item{pureR}{logical; if \code{TRUE}, use only R code, if \code{FALSE}, use compiled (C++) code; use \code{TRUE} when the endian-ness of the binary differs from the host machine (\code{.Platform$endian}).} \item{crs}{coordinate reference system to be assigned; object of class \code{crs}} diff --git a/man/st_break_antimeridian.Rd b/man/st_break_antimeridian.Rd index b6752fae7..65088b492 100644 --- a/man/st_break_antimeridian.Rd +++ b/man/st_break_antimeridian.Rd @@ -13,13 +13,13 @@ st_break_antimeridian(x, lon_0 = 0, tol = 1e-04, ...) \method{st_break_antimeridian}{sfc}(x, lon_0 = 0, tol = 1e-04, ...) } \arguments{ -\item{x}{object of class sf or sfc} +\item{x}{object of class \code{sf} or \code{sfc}} \item{lon_0}{target central longitude (degrees)} \item{tol}{half of break width (degrees, default 0.0001)} -\item{...}{ingnored here} +\item{...}{ignored here} } \description{ Longitudes can be broken at the antimeridian of a target central longitude diff --git a/man/st_is_longlat.Rd b/man/st_is_longlat.Rd index 926baf137..bb747aee5 100644 --- a/man/st_is_longlat.Rd +++ b/man/st_is_longlat.Rd @@ -10,7 +10,7 @@ st_is_longlat(x) \item{x}{object of class \link{sf} or \link{sfc}, or otherwise an object of a class that has an \link{st_crs} method returning a \code{crs} object} } \value{ -TRUE if x has geographic coordinates, FALSE if it has projected coordinates, or NA if \code{is.na(st_crs(x))}. +\code{TRUE} if \code{x} has geographic coordinates, \code{FALSE} if it has projected coordinates, or \code{NA} if \code{is.na(st_crs(x))}. } \description{ Assert whether simple feature coordinates are longlat degrees diff --git a/man/st_precision.Rd b/man/st_precision.Rd index f414cd535..7175b8a2b 100644 --- a/man/st_precision.Rd +++ b/man/st_precision.Rd @@ -27,7 +27,10 @@ Set precision \details{ If \code{precision} is a \code{units} object, the object on which we set precision must have a coordinate reference system with compatible distance units. -Setting a \code{precision} has no direct effect on coordinates of geometries, but merely set an attribute tag to an \code{sfc} object. The effect takes place in \link{st_as_binary} or, more precise, in the C++ function \code{CPL_write_wkb}, where simple feature geometries are being serialized to well-known-binary (WKB). This happens always when routines are called in GEOS library (geometrical operations or predicates), for writing geometries using \link{st_write} or \link{write_sf}, \code{st_make_valid} in package \code{lwgeom}; also \link{aggregate} and \link{summarise} by default union geometries, which calls a GEOS library function. Routines in these libraries receive rounded coordinates, and possibly return results based on them. 
\link{st_as_binary} contains an example of a roundtrip of \code{sfc} geometries through WKB, in order to see the rounding happening to R data. +Setting a \code{precision} has no direct effect on coordinates of geometries, but merely set an attribute tag to an \code{sfc} object. +The effect takes place in \link{st_as_binary} or, more precise, in the C++ function \code{CPL_write_wkb}, where simple feature geometries are being serialized to well-known-binary (WKB). +This happens always when routines are called in GEOS library (geometrical operations or predicates), for writing geometries using \link{st_write} or \link{write_sf}, \code{st_make_valid} in package \code{lwgeom}; also \link{aggregate} and \link{summarise} by default union geometries, which calls a GEOS library function. +Routines in these libraries receive rounded coordinates, and possibly return results based on them. \link{st_as_binary} contains an example of a roundtrip of \code{sfc} geometries through WKB, in order to see the rounding happening to R data. The reason to support precision is that geometrical operations in GEOS or liblwgeom may work better at reduced precision. For writing data from R to external resources it is harder to think of a good reason to limiting precision. } diff --git a/man/st_read.Rd b/man/st_read.Rd index a6640a6b7..beadecafa 100644 --- a/man/st_read.Rd +++ b/man/st_read.Rd @@ -65,7 +65,7 @@ use the \code{query} argument.} \item{options}{character; driver dependent dataset open options, multiple options supported. For possible values, see the "Open options" section of the GDAL documentation of the corresponding driver, and -https://github.com/r-spatial/sf/issues/1157 for an example.} +\url{https://github.com/r-spatial/sf/issues/1157} for an example.} \item{quiet}{logical; suppress info on name, driver, size and spatial reference, or signaling no or multiple layers} @@ -88,11 +88,11 @@ converted to factors? 
Default for \code{read_sf} or R version >= 4.1.0 is \code{FALSE}, for \code{st_read} and R version < 4.1.0 equal to \code{default.stringsAsFactors()}} -\item{int64_as_string}{logical; if TRUE, Int64 attributes are returned as -string; if FALSE, they are returned as double and a warning is given when +\item{int64_as_string}{logical; if \code{TRUE}, Int64 attributes are returned as +string; if \code{FALSE}, they are returned as double and a warning is given when precision is lost (i.e., values are larger than 2^53).} -\item{check_ring_dir}{logical; if TRUE, polygon ring directions are checked +\item{check_ring_dir}{logical; if \code{TRUE}, polygon ring directions are checked and if necessary corrected (when seen from above: exterior ring counter clockwise, holes clockwise)} @@ -104,7 +104,7 @@ clockwise, holes clockwise)} \item{optional}{logical; passed to \link[base]{as.data.frame}; always \code{TRUE} when \code{as_tibble} is \code{TRUE}} -\item{use_stream}{Use TRUE to use the experimental columnar interface introduced in GDAL 3.6.} +\item{use_stream}{Use \code{TRUE} to use the experimental columnar interface introduced in GDAL 3.6.} \item{as_tibble}{logical; should the returned table be of class tibble or data.frame?} diff --git a/man/st_relate.Rd b/man/st_relate.Rd index 99bb9563c..ddb70f550 100644 --- a/man/st_relate.Rd +++ b/man/st_relate.Rd @@ -13,7 +13,7 @@ st_relate(x, y, pattern = NA_character_, sparse = !is.na(pattern)) \item{pattern}{character; define the pattern to match to, see details.} -\item{sparse}{logical; should a sparse matrix be returned (TRUE) or a dense matrix?} +\item{sparse}{logical; should a sparse matrix be returned (\code{TRUE}) or a dense matrix?} } \value{ In case \code{pattern} is not given, \code{st_relate} returns a dense \code{character} matrix; element \verb{[i,j]} has nine characters, referring to the DE9-IM relationship between \code{x[i]} and \code{y[j]}, encoded as IxIy,IxBy,IxEy,BxIy,BxBy,BxEy,ExIy,ExBy,ExEy where I refers to interior, B to boundary, and E to exterior, and e.g. BxIy the dimensionality of the intersection of the the boundary of \code{x[i]} and the interior of \code{y[j]}, which is one of: 0, 1, 2, or F; digits denoting dimensionality of intersection, F denoting no intersection. When \code{pattern} is given, a dense logical matrix or sparse index list returned with matches to the given pattern; see \link{st_intersection} for a description of the returned matrix or list. See also \url{https://en.wikipedia.org/wiki/DE-9IM} for further explanation. diff --git a/man/st_transform.Rd b/man/st_transform.Rd index f848bad26..95cd81e98 100644 --- a/man/st_transform.Rd +++ b/man/st_transform.Rd @@ -95,7 +95,7 @@ Transforms using the \code{pipeline=} argument may fail if there is ambiguity in the axis order of the specified coordinate reference system; if you need the traditional GIS order, use \code{"OGC:CRS84"}, not \code{"EPSG:4326"}. Extra care is needed with the ESRI Shapefile format, -because WKT1 does not store axis order unambigiously. +because WKT1 does not store axis order unambiguously. The \code{st_transform} method for \code{sfg} objects assumes that the CRS of the object is available as an attribute of that name. diff --git a/man/st_write.Rd b/man/st_write.Rd index a7d8d49c3..eee3b3ae0 100644 --- a/man/st_write.Rd +++ b/man/st_write.Rd @@ -111,8 +111,8 @@ When deleting layers or data sources is not successful, no error is emitted. 
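A rough illustration of the DE-9IM matching described above (the two edge-sharing polygons are invented; the relate string in the comment is the expected GEOS result):

    library(sf)
    a <- st_as_sfc("POLYGON ((0 0, 2 0, 2 2, 0 2, 0 0))")
    b <- st_as_sfc("POLYGON ((2 0, 4 0, 4 2, 2 2, 2 0))")  # shares an edge with a
    st_relate(a, b)                         # dense character matrix, e.g. "FF2F11212"
    st_relate(a, b, pattern = "F***1****")  # sparse index list: pairs touching along a line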
\code{delete_dsn} and \code{delete_layer} should be handled with care; the former may erase complete directories or databases. -st_delete deletes layer(s) in a data source, or a data source if layers are -omitted; it returns TRUE on success, FALSE on failure, invisibly. +\code{st_delete()} deletes layer(s) in a data source, or a data source if layers are +omitted; it returns \code{TRUE} on success, \code{FALSE} on failure, invisibly. } \examples{ nc = st_read(system.file("shape/nc.shp", package="sf")) diff --git a/man/st_zm.Rd b/man/st_zm.Rd index b5f536008..c398d1062 100644 --- a/man/st_zm.Rd +++ b/man/st_zm.Rd @@ -11,7 +11,7 @@ st_zm(x, ..., drop = TRUE, what = "ZM") \item{...}{ignored} -\item{drop}{logical; drop, or (FALSE) add?} +\item{drop}{logical; drop, or (\code{FALSE}) add?} \item{what}{character which dimensions to drop or add} } @@ -19,7 +19,8 @@ st_zm(x, ..., drop = TRUE, what = "ZM") Drop Z and/or M dimensions from feature geometries, resetting classes appropriately } \details{ -Only combinations \code{drop=TRUE}, \code{what = "ZM"}, and \code{drop=FALSE}, \code{what="Z"} are supported so far. In case \code{add=TRUE}, \code{x} should have \code{XY} geometry, and zero values are added for \code{Z}. +Only combinations \code{drop=TRUE}, \code{what = "ZM"}, and \code{drop=FALSE}, \code{what="Z"} are supported so far. +In case \code{add=TRUE}, \code{x} should have \code{XY} geometry, and zero values are added for \code{Z}. } \examples{ st_zm(st_linestring(matrix(1:32,8))) diff --git a/man/tidyverse.Rd b/man/tidyverse.Rd index 03d5106a7..c9c726b7d 100644 --- a/man/tidyverse.Rd +++ b/man/tidyverse.Rd @@ -2,7 +2,6 @@ % Please edit documentation in R/tidyverse.R, R/join.R \name{tidyverse} \alias{tidyverse} -\alias{group_split.sf} \alias{filter.sf} \alias{arrange.sf} \alias{group_by.sf} @@ -23,6 +22,7 @@ \alias{spread.sf} \alias{sample_n.sf} \alias{sample_frac.sf} +\alias{group_split.sf} \alias{nest.sf} \alias{separate.sf} \alias{separate_rows.sf} @@ -37,8 +37,6 @@ \alias{anti_join.sf} \title{Tidyverse methods for sf objects (remove .sf suffix!)} \usage{ -group_split.sf(.tbl, ..., .keep = TRUE) - filter.sf(.data, ..., .dots) arrange.sf(.data, ..., .dots) @@ -131,6 +129,8 @@ sample_frac.sf( .env = parent.frame() ) +group_split.sf(.tbl, ..., .keep = TRUE) + nest.sf(.data, ...) separate.sf( @@ -166,22 +166,10 @@ semi_join.sf(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"), ...) anti_join.sf(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"), ...) } \arguments{ -\item{...}{other arguments} - -\item{.keep}{Should the join keys from both \code{x} and \code{y} be preserved in the -output? -\itemize{ -\item If \code{NULL}, the default, joins on equality retain only the keys from \code{x}, -while joins on inequality retain the keys from both inputs. -\item If \code{TRUE}, all keys from both inputs are retained. -\item If \code{FALSE}, only keys from \code{x} are retained. For right and full joins, -the data in key columns corresponding to rows that only exist in \code{y} are -merged into the key columns from \code{x}. Can't be used when joining on -inequality conditions. 
-}} - \item{.data}{data object of class \link{sf}} +\item{...}{other arguments} + \item{.dots}{see corresponding function in package \code{dplyr}} \item{add}{see corresponding function in dplyr} @@ -270,6 +258,10 @@ more details.} \item{.env}{see original function docs} +\item{.tbl}{see original function docs} + +\item{.keep}{see original function docs} + \item{col}{see \link[tidyr]{separate}} \item{into}{see \link[tidyr]{separate}} From ca3859aa17132410fbee205f11df08e3748d702c Mon Sep 17 00:00:00 2001 From: olivroy Date: Wed, 17 Jan 2024 18:56:47 -0500 Subject: [PATCH 4/5] Typos + documentation edits. --- R/tidyverse.R | 2 +- man/tidyverse.Rd | 4 ++-- sf.Rproj | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/R/tidyverse.R b/R/tidyverse.R index 66bd36f80..f0c4c8cfe 100644 --- a/R/tidyverse.R +++ b/R/tidyverse.R @@ -224,6 +224,7 @@ rename.sf <- function(.data, ...) { } #' @name tidyverse +#' @param .fn,.cols see original docs rename_with.sf = function(.data, .fn, .cols, ...) { if (!requireNamespace("rlang", quietly = TRUE)) stop("rlang required: install that first") # nocov @@ -404,7 +405,6 @@ gather.sf <- function(data, key, value, ..., na.rm = FALSE, convert = FALSE, fac } #' @name tidyverse -#' @param template see original function docs #' @param data see original function docs #' @param cols see original function docs #' @param names_to see original function docs diff --git a/man/tidyverse.Rd b/man/tidyverse.Rd index c9c726b7d..5a1ee0be7 100644 --- a/man/tidyverse.Rd +++ b/man/tidyverse.Rd @@ -178,6 +178,8 @@ anti_join.sf(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"), ...) lazy data frames (e.g. from dbplyr or dtplyr). See \emph{Methods}, below, for more details.} +\item{.fn, .cols}{see original docs} + \item{do_union}{logical; in case \code{summary} does not create a geometry column, should geometries be created by unioning using \link{st_union}, or simply by combining using \link{st_combine}? Using \link{st_union} resolves internal boundaries, but in case of unioning points, this will likely change the order of the points; see Details.} \item{is_coverage}{logical; if \code{do_union} is \code{TRUE}, use an optimized algorithm for features that form a polygonal coverage (have no overlaps)} @@ -309,8 +311,6 @@ it is a potentially expensive operation so you must opt into it.} \item{suffix}{If there are non-joined duplicate variables in \code{x} and \code{y}, these suffixes will be added to the output to disambiguate them. Should be a character vector of length 2.} - -\item{template}{see original function docs} } \value{ an object of class \link{sf} diff --git a/sf.Rproj b/sf.Rproj index 89df4dd41..2a934b1fe 100644 --- a/sf.Rproj +++ b/sf.Rproj @@ -20,3 +20,4 @@ PackageUseDevtools: Yes PackageInstallArgs: --no-multiarch --with-keep.source PackageRoxygenize: rd,collate,namespace +SpellingDictionary: en_US From f6c6b6abf0672706f3b9e1217f183b74bbeb5dae Mon Sep 17 00:00:00 2001 From: olivroy <52606734+olivroy@users.noreply.github.com> Date: Thu, 18 Jan 2024 09:15:31 -0500 Subject: [PATCH 5/5] Fix typos --- R/datasets.R | 2 +- R/shift_longitude.R | 2 +- R/tidyverse.R | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/R/datasets.R b/R/datasets.R index 444e05a36..d1fa74c47 100644 --- a/R/datasets.R +++ b/R/datasets.R @@ -2,7 +2,7 @@ #' #' Sudden Infant Death Syndrome (SIDS) sample data for North Carolina counties, #' two time periods (1974-78 and 1979-84). 
The details of the columns can be -#' found in a [spdep packages vignette](https://r-spatial.github.io/spdep/articles/sids.html). +#' found in a [spdep package vignette](https://r-spatial.github.io/spdep/articles/sids.html). #' Please note that, though this is basically the same as \code{nc.sids} dataset in spData #' package, \code{nc} only contains a subset of variables. The differences are #' also discussed on the vignette. diff --git a/R/shift_longitude.R b/R/shift_longitude.R index 440b370f7..d5e96e7c4 100644 --- a/R/shift_longitude.R +++ b/R/shift_longitude.R @@ -9,7 +9,7 @@ #' equivalent of [recenter][sp::recenter] in the sp package and #' `ST_ShiftLongitude` in PostGIS. #' -#' @param x object of class `sf` or `sfc`. +#' @param x object of class `sf` or `sfc` #' @param ... ignored #' #' @export diff --git a/R/tidyverse.R b/R/tidyverse.R index f0c4c8cfe..049673fd4 100644 --- a/R/tidyverse.R +++ b/R/tidyverse.R @@ -5,7 +5,6 @@ # This is currently only used in `bind_rows()` and `bind_cols()` # because sf overrides all default implementations - dplyr_reconstruct.sf = function(data, template) { sfc_name = attr(template, "sf_column") if (inherits(template, "tbl_df"))