@@ -113,11 +113,35 @@ url_parameters <- function(urls, parameter_names) {
113
113
# '@examples
114
114
# 'url_parse("https://en.wikipedia.org/wiki/Article")
115
115
# '
116
- # '@seealso \code{url_parameters} for extracting values associated with particular keys in a URL's
117
- # 'query string.
116
+ #' @seealso \code{\link{url_parameters}} for extracting values associated with particular keys in a URL's
117
+ #' query string, and \code{\link{url_compose}}, which is \code{url_parse} in reverse.
118
118
# '
119
119
# '@export
120
120
url_parse <- function(urls) {
  # Delegate to the compiled routine registered by the package. The native
  # symbol name and the PACKAGE string must match the registration exactly,
  # so the stray padding spaces inside the quoted literals (scrape garbling)
  # would have made .Call fail to resolve the symbol; they are removed here.
  .Call("urltools_url_parse", PACKAGE = "urltools", urls)
}
123
123
124
+ # '@title Recompose Parsed URLs
125
+ # '
126
+ # '@description Sometimes you want to take a vector of URLs, parse them, perform
127
+ # 'some operations and then rebuild them. \code{url_compose} takes a data.frame produced
128
+ # 'by \code{\link{url_parse}} and rebuilds it into a vector of full URLs (or: URLs as full
129
+ # 'as the vector initially thrown into url_parse).
130
+ # '
131
+ # 'This is currently a `beta` feature; please do report bugs if you find them.
132
+ # '
133
+ # '@param parsed_urls a data.frame sourced from \code{\link{url_parse}}
134
+ # '
135
+ # '@seealso \code{\link{scheme}} and other accessors, which you may want to
136
+ # 'run URLs through before composing them to modify individual values.
137
+ # '
138
+ # '@examples
139
+ # '#Parse a URL and compose it
140
+ # 'url <- "http://en.wikipedia.org"
141
+ # 'url_compose(url_parse(url))
142
+ # '
143
+ # '@export
144
url_compose <- function(parsed_urls) {
  # Rebuild full URLs from a data.frame produced by url_parse(). As with the
  # other wrappers, .Call resolves the native symbol and PACKAGE name by exact
  # string match, so the padded literals from the scrape are corrected here.
  .Call("urltools_url_compose", PACKAGE = "urltools", parsed_urls)
}
147
+
0 commit comments