Merge pull request #83 from ropensci/testthat-3e
Update to `testthat` 3e
jrdnbradford authored Sep 2, 2024
2 parents e27c8ff + c8be029 commit ffe1f73
Showing 13 changed files with 428 additions and 562 deletions.
3 changes: 2 additions & 1 deletion DESCRIPTION
@@ -41,11 +41,12 @@ Suggests:
     knitr,
     rmarkdown,
     dplyr,
-    testthat,
+    testthat (>= 3.0.0),
     covr,
     curl
 Depends:
     R (>= 3.0.0)
 VignetteBuilder: knitr
 RoxygenNote: 7.2.3
 Encoding: UTF-8
+Config/testthat/edition: 3
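
The `Config/testthat/edition: 3` field is what opts the whole suite into testthat's 3rd edition. As a minimal sketch (not part of this diff), the edition can also be set and inspected locally while migrating file by file:

    library(testthat)

    local_edition(3)  # opt the current scope (e.g. one test file) into edition 3
    edition_get()     # report the edition currently in force; here 3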
11 changes: 10 additions & 1 deletion tests/testthat.R
@@ -1,4 +1,13 @@
+# This file is part of the standard setup for testthat.
+# It is recommended that you do not modify it.
+#
+# Where should you do additional test configuration?
+# Learn more about the roles of various files in:
+# * https://r-pkgs.org/testing-design.html#sec-tests-files-overview
+# * https://testthat.r-lib.org/articles/special-files.html
+
 library(testthat)
-if(curl::has_internet()){
+library(robotstxt)
+if (curl::has_internet()) {
   test_check("robotstxt")
 }
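
The new header comments point to testthat's special files for any additional configuration. A hypothetical sketch (not part of this diff) of such a file, following the documented setup/teardown pattern:

    # tests/testthat/setup.R -- hypothetical example; runs once before the suite
    op <- options(warn = 1)                    # e.g. print warnings as they occur
    withr::defer(options(op), teardown_env())  # restore the option after the last test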
132 changes: 132 additions & 0 deletions tests/testthat/_snaps/http_event_handling.md
@@ -0,0 +1,132 @@
# non www redirects are handled non silently

    Code
      domain_change <- readRDS(system.file("http_requests/http_domain_change.rds",
        package = "robotstxt"))
      suppressMessages(get_robotstxt("http://google.com", rt_robotstxt_http_getter = function(
        ...) {
        domain_change
      }, warn = TRUE))
    Condition
      Warning in `request_handler_handler()`:
      Event: on_file_type_mismatch
      Warning in `request_handler_handler()`:
      Event: on_suspect_content
    Output
      [robots.txt]
      --------------------------------------
      # robots.txt overwrite by: on_suspect_content
      User-agent: *
      Allow: /
      [events]
      --------------------------------------
      requested: www.petermeissner.de
      downloaded: https://petermeissner.de/
      $on_redirect
      $on_redirect[[1]]
      $on_redirect[[1]]$status
      [1] 301
      $on_redirect[[1]]$location
      [1] "https://www.petermeissner.de/"
      $on_redirect[[2]]
      $on_redirect[[2]]$status
      [1] 301
      $on_redirect[[2]]$location
      [1] "https://petermeissner.de/"
      $on_redirect[[3]]
      $on_redirect[[3]]$status
      [1] 200
      $on_redirect[[3]]$location
      NULL
      $on_file_type_mismatch
      $on_file_type_mismatch$content_type
      [1] "text/html"
      $on_suspect_content
      $on_suspect_content$parsable
      [1] FALSE
      $on_suspect_content$content_suspect
      [1] TRUE
      [attributes]
      --------------------------------------
      problems, cached, request, class

# client error

    Code
      http_client_error <- readRDS(system.file("http_requests/http_client_error.rds",
        package = "robotstxt"))
      suppressMessages(get_robotstxt("httpbin.org", rt_robotstxt_http_getter = function(
        ...) {
        http_client_error
      }))
    Condition
      Warning in `request_handler_handler()`:
      Event: on_client_error
      Warning in `request_handler_handler()`:
      Event: on_file_type_mismatch
    Output
      [robots.txt]
      --------------------------------------
      # robots.txt overwrite by: on_client_error
      User-agent: *
      Allow: /
      [events]
      --------------------------------------
      requested: https://httpbin.org/status/400
      downloaded: https://httpbin.org/status/400
      $on_client_error
      $on_client_error$status_code
      [1] 400
      $on_file_type_mismatch
      $on_file_type_mismatch$content_type
      [1] "text/html; charset=utf-8"
      [attributes]
      --------------------------------------
      problems, cached, request, class

# server error

    Code
      res <- suppressMessages(get_robotstxt("httpbin.org", rt_robotstxt_http_getter = f,
        on_server_error = list(signal = "warning"), force = TRUE))
    Condition
      Warning in `request_handler_handler()`:
      Event: on_server_error
      Warning in `request_handler_handler()`:
      Event: on_file_type_mismatch

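Snapshot files like this one are written by testthat's `expect_snapshot()` under edition 3. A sketch of the kind of test that could record the first entry above, inferred from the snapshot's own Code section (the actual test file is not shown in this excerpt):

    test_that("non www redirects are handled non silently", {
      expect_snapshot({
        domain_change <- readRDS(
          system.file("http_requests/http_domain_change.rds", package = "robotstxt")
        )
        # a canned response object stands in for the real HTTP request,
        # so the test needs no network access
        suppressMessages(
          get_robotstxt(
            "http://google.com",
            rt_robotstxt_http_getter = function(...) domain_change,
            warn = TRUE
          )
        )
      })
    })

The first run records code, conditions, and output; later runs fail if any of them drift from the snapshot.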
31 changes: 31 additions & 0 deletions tests/testthat/_snaps/paths_allowed.md
@@ -0,0 +1,31 @@
# paths_allowed() works also with 'downloaded' robots.txt files

    Code
      domain_change <- readRDS(system.file("http_requests/http_domain_change.rds",
        package = "robotstxt"))
      suppressMessages(paths_allowed(paths = "https://github.io/index.html",
        rt_robotstxt_http_getter = function(...) {
          domain_change
        }, warn = FALSE))
    Output
      [1] TRUE

---

    Code
      domain_change <- readRDS(system.file("http_requests/http_domain_change.rds",
        package = "robotstxt"))
      suppressMessages(paths_allowed(paths = "https://github.io/index.html",
        rt_robotstxt_http_getter = function(...) {
          domain_change
        }))
    Condition
      Warning in `request_handler_handler()`:
      Event: on_domain_change
      Warning in `request_handler_handler()`:
      Event: on_file_type_mismatch
      Warning in `request_handler_handler()`:
      Event: on_suspect_content
    Output
      [1] TRUE

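A `---` line separates consecutive snapshot expectations from the same test: the two entries above run the same call first with `warn = FALSE` and then with warnings recorded. A condensed sketch, again inferred from the Code sections rather than taken from the test file:

    test_that("paths_allowed() works also with 'downloaded' robots.txt files", {
      domain_change <- readRDS(
        system.file("http_requests/http_domain_change.rds", package = "robotstxt")
      )
      # first entry: event warnings silenced
      expect_snapshot(
        suppressMessages(paths_allowed(
          paths = "https://github.io/index.html",
          rt_robotstxt_http_getter = function(...) domain_change,
          warn = FALSE
        ))
      )
      # second entry, written after the --- separator: warnings recorded
      expect_snapshot(
        suppressMessages(paths_allowed(
          paths = "https://github.io/index.html",
          rt_robotstxt_http_getter = function(...) domain_change
        ))
      )
    })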
90 changes: 47 additions & 43 deletions tests/testthat/test_attribute_handling.R
@@ -1,15 +1,15 @@
-context("attribute handling")
-
 test_that("get_robotstxt produces attributes", {
 
   expect_true({
     www_redirect <- readRDS(system.file("http_requests/http_redirect_www.rds", package = "robotstxt"))
 
-    suppressWarnings(
-      rtxt <-
-        get_robotstxt(
-          "http://google.com",
-          rt_robotstxt_http_getter = function(...){www_redirect}
+    suppressMessages(
+      suppressWarnings(
+        rtxt <-
+          get_robotstxt(
+            "http://google.com",
+            rt_robotstxt_http_getter = function(...){www_redirect}
+          )
       )
     )
 
@@ -19,12 +19,14 @@ test_that("get_robotstxt produces attributes", {
   expect_true({
     http_404 <- readRDS(system.file("http_requests/http_404.rds", package = "robotstxt"))
 
-    suppressWarnings(
-      rtxt <-
-        get_robotstxt(
-          "http://google.com",
-          rt_robotstxt_http_getter = function(...){http_404}
-        )
+    suppressMessages(
+      suppressWarnings(
+        rtxt <-
+          get_robotstxt(
+            "http://google.com",
+            rt_robotstxt_http_getter = function(...){http_404}
+          )
+      )
     )
 
     "problems" %in% names(attributes(rtxt))
@@ -34,12 +36,14 @@ test_that("get_robotstxt produces attributes", {
   expect_true({
     http_ok <- readRDS(system.file("http_requests/http_ok_1.rds", package = "robotstxt"))
 
-    suppressWarnings(
-      rtxt <-
-        get_robotstxt(
-          "http://google.com",
-          rt_robotstxt_http_getter = function(...){http_404}
-        )
+    suppressMessages(
+      suppressWarnings(
+        rtxt <-
+          get_robotstxt(
+            "http://google.com",
+            rt_robotstxt_http_getter = function(...){http_404}
+          )
+      )
     )
 
     "problems" %in% names(attributes(rtxt))
@@ -49,28 +53,31 @@ test_that("get_robotstxt produces attributes", {
   expect_true({
     http_ok <- readRDS(system.file("http_requests/http_ok_2.rds", package = "robotstxt"))
 
-    suppressWarnings(
-      rtxt <-
-        get_robotstxt(
-          "http://google.com",
-          rt_robotstxt_http_getter = function(...){http_404}
-        )
+    suppressMessages(
+      suppressWarnings(
+        rtxt <-
+          get_robotstxt(
+            "http://google.com",
+            rt_robotstxt_http_getter = function(...){http_404}
+          )
+      )
     )
 
     "problems" %in% names(attributes(rtxt))
   })
 
 
-
   expect_true({
     http_ok <- readRDS(system.file("http_requests/http_ok_3.rds", package = "robotstxt"))
 
-    suppressWarnings(
-      rtxt <-
-        get_robotstxt(
-          "http://google.com",
-          rt_robotstxt_http_getter = function(...){http_404}
-        )
+    suppressMessages(
+      suppressWarnings(
+        rtxt <-
+          get_robotstxt(
+            "http://google.com",
+            rt_robotstxt_http_getter = function(...){http_404}
+          )
+      )
     )
 
     "problems" %in% names(attributes(rtxt))
@@ -80,20 +87,17 @@ test_that("get_robotstxt produces attributes", {
   expect_true({
     http_ok <- readRDS(system.file("http_requests/http_ok_4.rds", package = "robotstxt"))
 
-    suppressWarnings(
-      rtxt <-
-        get_robotstxt(
-          "http://google.com",
-          rt_robotstxt_http_getter = function(...){http_404}
-        )
+    suppressMessages(
+      suppressWarnings(
+        rtxt <-
+          get_robotstxt(
+            "http://google.com",
+            rt_robotstxt_http_getter = function(...){http_404}
+          )
+      )
     )
 
     "problems" %in% names(attributes(rtxt))
   })
 
-
-
 })
-
-
-
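
Every block in this file gains the same two layers of suppression: messages outside, warnings inside. A hypothetical refactor (not part of this PR) could name the pattern once; `quiet()` below is not a function from robotstxt or testthat:

    # hypothetical helper: silence both messages and warnings around an expression
    quiet <- function(expr) suppressMessages(suppressWarnings(expr))

    www_redirect <- readRDS(
      system.file("http_requests/http_redirect_www.rds", package = "robotstxt")
    )
    rtxt <- quiet(
      get_robotstxt(
        "http://google.com",
        rt_robotstxt_http_getter = function(...) www_redirect
      )
    )
    "problems" %in% names(attributes(rtxt))  # the attribute each block checks for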
15 changes: 7 additions & 8 deletions tests/testthat/test_get_robotstxt.R
@@ -1,8 +1,3 @@
-# testing the workings of get_robotstxt function
-
-context("get_robotstxt()")
-
-
 test_that(
   "NA in NA out", {
     expect_true({
@@ -12,7 +7,7 @@ test_that(
     expect_true({
       all(
         is.na(
-          get_robotstxts(domain = c(NA, NA))
+          suppressMessages(get_robotstxts(domain = c(NA, NA)))
         )
       )
     })
@@ -29,9 +24,13 @@ test_that(
     })
 
     expect_true({
-      suppressWarnings(get_robotstxts(domain = c("example.com", "example.com")))
+      suppressMessages(
+        suppressWarnings(
+          get_robotstxts(domain = c("example.com", "example.com"))
+        )
+      )
       TRUE
     })
   }
 )
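
The "NA in NA out" test pins down the vectorised contract of `get_robotstxts()`: missing domains come back as missing results. A minimal sketch of the asserted behaviour, built from the same calls shown in the diff:

    library(robotstxt)

    res <- suppressMessages(get_robotstxts(domain = c(NA, NA)))
    all(is.na(res))  # TRUE: NA in, NA out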