Binary files 1.98-1.7+dfsg-1/data/mimeTypeExtensions.rda and 1.98-1.8+dfsg-1/data/mimeTypeExtensions.rda differ
diff -pruN 1.98-1.7+dfsg-1/debian/changelog 1.98-1.8+dfsg-1/debian/changelog
--- 1.98-1.7+dfsg-1/debian/changelog	2022-06-24 07:01:58.000000000 +0000
+++ 1.98-1.8+dfsg-1/debian/changelog	2022-08-04 04:45:11.000000000 +0000
@@ -1,3 +1,9 @@
+r-cran-rcurl (1.98-1.8+dfsg-1) unstable; urgency=medium
+
+  * New upstream version
+
+ -- Andreas Tille <tille@debian.org>  Thu, 04 Aug 2022 06:45:11 +0200
+
 r-cran-rcurl (1.98-1.7+dfsg-1) unstable; urgency=medium
 
   * New upstream version
diff -pruN 1.98-1.7+dfsg-1/DESCRIPTION 1.98-1.8+dfsg-1/DESCRIPTION
--- 1.98-1.7+dfsg-1/DESCRIPTION	2022-06-09 09:26:30.000000000 +0000
+++ 1.98-1.8+dfsg-1/DESCRIPTION	2022-07-30 14:52:50.000000000 +0000
@@ -1,5 +1,5 @@
 Package: RCurl
-Version: 1.98-1.7
+Version: 1.98-1.8
 Title: General Network (HTTP/FTP/...) Client Interface for R
 Authors@R: c(person("CRAN Team", role = c('ctb', 'cre'),
                     email = "CRAN@r-project.org",
@@ -30,9 +30,9 @@ Collate: aclassesEnums.R bitClasses.R xb
         multi.S options.S scp.R support.S upload.R urlExists.R zclone.R
         zzz.R
 NeedsCompilation: yes
-Packaged: 2022-06-08 19:12:14 UTC; hornik
+Packaged: 2022-07-30 14:48:30 UTC; ripley
 Author: CRAN Team [ctb, cre] (de facto maintainer since 2013),
   Duncan Temple Lang [aut] (<https://orcid.org/0000-0003-0159-1546>)
 Maintainer: CRAN Team <CRAN@r-project.org>
 Repository: CRAN
-Date/Publication: 2022-06-09 09:26:30 UTC
+Date/Publication: 2022-07-30 14:52:50 UTC
diff -pruN 1.98-1.7+dfsg-1/man/basicHeaderGatherer.Rd 1.98-1.8+dfsg-1/man/basicHeaderGatherer.Rd
--- 1.98-1.7+dfsg-1/man/basicHeaderGatherer.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/basicHeaderGatherer.Rd	2022-07-30 14:38:01.000000000 +0000
@@ -51,9 +51,9 @@ parseHTTPHeader(lines, multi = TRUE)
   \code{\link{curlSetOpt}}  
 }
 \examples{
-  if(url.exists("http://www.omegahat.net/RCurl/index.html")) withAutoprint({
+  if(url.exists("https://www.omegahat.net/RCurl/index.html")) withAutoprint({
      h = basicHeaderGatherer()
-     getURI("http://www.omegahat.net/RCurl/index.html",
+     getURI("https://www.omegahat.net/RCurl/index.html",
               headerfunction = h$update)
      names(h$value())
      h$value()
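
The two edits above only switch the scheme; the pattern the example documents is unchanged: basicHeaderGatherer() builds a callback that accumulates the response headers. A minimal sketch of that pattern, assuming RCurl is installed and www.omegahat.net is reachable over HTTPS:

    library(RCurl)
    u <- "https://www.omegahat.net/RCurl/index.html"
    if (url.exists(u)) {
      h <- basicHeaderGatherer()            # closure collecting header lines
      getURI(u, headerfunction = h$update)  # route headers into the gatherer
      h$value()[["status"]]                 # parsed status code, e.g. "200"
    }
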
diff -pruN 1.98-1.7+dfsg-1/man/basicTextGatherer.Rd 1.98-1.8+dfsg-1/man/basicTextGatherer.Rd
--- 1.98-1.7+dfsg-1/man/basicTextGatherer.Rd	2022-02-08 14:27:52.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/basicTextGatherer.Rd	2022-07-30 14:38:16.000000000 +0000
@@ -130,17 +130,17 @@ debugGatherer()
   \code{\link{dynCurlReader}}  
 }
 \examples{
-if(url.exists("http://www.omegahat.net/RCurl/index.html")) withAutoprint({
-  txt = getURL("http://www.omegahat.net/RCurl/index.html", write = basicTextGatherer())
+if(url.exists("https://www.omegahat.net/RCurl/index.html")) withAutoprint({
+  txt = getURL("https://www.omegahat.net/RCurl/index.html", write = basicTextGatherer())
 
   h = basicTextGatherer()
-  txt = getURL("http://www.omegahat.net/RCurl/index.html", write = h$update)
+  txt = getURL("https://www.omegahat.net/RCurl/index.html", write = h$update)
     ## Cumulate across pages.
-  txt = getURL("http://www.omegahat.net/index.html", write = h$update)
+  txt = getURL("https://www.omegahat.net/index.html", write = h$update)
 
 
   headers = basicTextGatherer()
-  txt = getURL("http://www.omegahat.net/RCurl/index.html",
+  txt = getURL("https://www.omegahat.net/RCurl/index.html",
                header = TRUE, headerfunction = headers$update)
 
      ## Now read the headers.
@@ -150,15 +150,15 @@ if(url.exists("http://www.omegahat.net/R
 
     ## Debugging callback
   d = debugGatherer()
-  x = getURL("http://www.omegahat.net/RCurl/index.html", debugfunction = d$update, verbose = TRUE)
+  x = getURL("https://www.omegahat.net/RCurl/index.html", debugfunction = d$update, verbose = TRUE)
   cat(names(d$value()))
   d$value()[["headerIn"]]
 
 
     ## This hung on Solaris
     ## 2022-02-08 philosophy.html is malformed UTF-8
-  uris = c("http://www.omegahat.net/RCurl/index.html",
-           "http://www.omegahat.net/RCurl/philosophy.html")
+  uris = c("https://www.omegahat.net/RCurl/index.html",
+           "https://www.omegahat.net/RCurl/philosophy.html")
 \dontrun{
   g = multiTextGatherer(uris)
   txt = getURIAsynchronous(uris,  write = g)
@@ -177,8 +177,8 @@ if(url.exists("http://www.omegahat.net/R
 \dontrun{
  Sys.setlocale(,"en_US.latin1")
  Sys.setlocale(,"en_US.UTF-8")
- uris = c("http://www.omegahat.net/RCurl/index.html",
-          "http://www.omegahat.net/RCurl/philosophy.html")
+ uris = c("https://www.omegahat.net/RCurl/index.html",
+          "https://www.omegahat.net/RCurl/philosophy.html")
  g = multiTextGatherer(uris)
  txt = getURIAsynchronous(uris,  write = g)
 }
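
Same mechanical change here; for reference, the gatherer in this example cumulates body text across successive requests until it is reset. A condensed sketch under the same assumptions as above:

    library(RCurl)
    h <- basicTextGatherer()
    if (url.exists("https://www.omegahat.net/RCurl/index.html")) {
      getURL("https://www.omegahat.net/RCurl/index.html", write = h$update)
      getURL("https://www.omegahat.net/index.html", write = h$update)  # appends
      nchar(h$value())   # combined length of both pages
      h$reset()          # clear the buffer before reusing the gatherer
    }
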
diff -pruN 1.98-1.7+dfsg-1/man/binaryBuffer.Rd 1.98-1.8+dfsg-1/man/binaryBuffer.Rd
--- 1.98-1.7+dfsg-1/man/binaryBuffer.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/binaryBuffer.Rd	2022-07-30 14:38:27.000000000 +0000
@@ -50,11 +50,11 @@ binaryBuffer(initialSize = 5000)
  \code{R_curl_write_binary_data}
 }
 \examples{
-if(url.exists("http://www.omegahat.net/RCurl/xmlParse.html.gz")) {
+if(url.exists("https://www.omegahat.net/RCurl/xmlParse.html.gz")) {
   buf = binaryBuffer()
 
      # Now fetch the binary file.
-  getURI("http://www.omegahat.net/RCurl/xmlParse.html.gz",
+  getURI("https://www.omegahat.net/RCurl/xmlParse.html.gz",
          write = getNativeSymbolInfo("R_curl_write_binary_data")$address,
          file = buf@ref)
 
diff -pruN 1.98-1.7+dfsg-1/man/chunkToLineReader.Rd 1.98-1.8+dfsg-1/man/chunkToLineReader.Rd
--- 1.98-1.7+dfsg-1/man/chunkToLineReader.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/chunkToLineReader.Rd	2022-07-30 14:42:29.000000000 +0000
@@ -68,9 +68,11 @@ function()
 
 s = summer()
 
-if(url.exists("http://www.omegahat.net/RCurl/matrix.data"))
-   getURL("http://www.omegahat.net/RCurl/matrix.data", write = chunkToLineReader(s$read)$read)
-}
+\dontrun{
+## broken, 2022-07-29
+if(url.exists("https://www.omegahat.net/RCurl/matrix.data"))
+   getURL("https://www.omegahat.net/RCurl/matrix.data", write = chunkToLineReader(s$read)$read)
+}}
 \keyword{IO}
 \concept{Web}
 \concept{HTTP}
diff -pruN 1.98-1.7+dfsg-1/man/curlPerform.Rd 1.98-1.8+dfsg-1/man/curlPerform.Rd
--- 1.98-1.7+dfsg-1/man/curlPerform.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/curlPerform.Rd	2022-07-30 14:41:06.000000000 +0000
@@ -71,9 +71,9 @@ curlMultiPerform(curl, multiple = TRUE)
  \code{\link{curlSetOpt}}
 }
 \examples{
-if(url.exists("http://www.omegahat.net/RCurl")) withAutoprint({
+if(url.exists("https://www.omegahat.net/RCurl")) withAutoprint({
   h = basicTextGatherer()
-  curlPerform(url = "http://www.omegahat.net/RCurl", writefunction = h$update)
+  curlPerform(url = "https://www.omegahat.net/RCurl", writefunction = h$update)
    # Now read the text that was cumulated during the query response.
   cat(h$value())
 })
@@ -110,9 +110,9 @@ if(url.exists("http://services.soaplite.
 
 
    # Using a C routine as the reader of the body of the response.
-if(url.exists("http://www.omegahat.net/RCurl/index.html")) withAutoprint({
+if(url.exists("https://www.omegahat.net/RCurl/index.html")) withAutoprint({
   routine = getNativeSymbolInfo("R_internalWriteTest", PACKAGE = "RCurl")$address
-  curlPerform(URL = "http://www.omegahat.net/RCurl/index.html",
+  curlPerform(URL = "https://www.omegahat.net/RCurl/index.html",
               writefunction = routine)
 })
 }
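
curlPerform() is the low-level entry point that getURL() and friends wrap; the hunks above swap in the HTTPS URLs but the callback wiring is unchanged. A minimal sketch, same reachability assumptions:

    library(RCurl)
    u <- "https://www.omegahat.net/RCurl"
    if (url.exists(u)) {
      h <- basicTextGatherer()
      curlPerform(url = u, writefunction = h$update)  # body goes to the callback
      cat(substring(h$value(), 1, 200))               # first part of the page
    }
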
diff -pruN 1.98-1.7+dfsg-1/man/curlSetOpt.Rd 1.98-1.8+dfsg-1/man/curlSetOpt.Rd
--- 1.98-1.7+dfsg-1/man/curlSetOpt.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/curlSetOpt.Rd	2022-07-30 14:39:35.000000000 +0000
@@ -64,7 +64,7 @@ curlSetOpt(..., .opts = list(), curl = g
   \code{\link{dupCurlHandle}}  
 }
 \examples{
-if(url.exists("http://www.omegahat.net")) {
+if(url.exists("https://www.omegahat.net")) {
 
   curl = getCurlHandle()
      # Note the header that extends across two lines with the second line
@@ -72,7 +72,7 @@ if(url.exists("http://www.omegahat.net")
    curlSetOpt( .opts = list(httpheader = c(Date = "Wed, 1/2/2000 10:01:01",
                             foo="abc\n    extra line"), verbose = TRUE),
                curl = curl)
-   ans = getURL("http://www.omegahat.net", curl = curl)
+   ans = getURL("https://www.omegahat.net", curl = curl)
 }
 }
 \keyword{IO}
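
Options set with curlSetOpt() persist on the handle, so later requests through the same handle inherit them; only the target URL changes in this hunk. A short sketch of that persistence (same assumptions):

    library(RCurl)
    curl <- getCurlHandle()
    curlSetOpt(.opts = list(httpheader = c(Accept = "text/html"),
                            verbose = TRUE),
               curl = curl)               # stored on the handle, not per call
    if (url.exists("https://www.omegahat.net"))
      txt <- getURL("https://www.omegahat.net", curl = curl)  # inherits options
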
diff -pruN 1.98-1.7+dfsg-1/man/dynCurlReader.Rd 1.98-1.8+dfsg-1/man/dynCurlReader.Rd
--- 1.98-1.7+dfsg-1/man/dynCurlReader.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/dynCurlReader.Rd	2022-07-30 14:40:02.000000000 +0000
@@ -96,18 +96,18 @@ dynCurlReader(curl = getCurlHandle(), tx
 
    # Each of these examples can be done with getURLContent().
    # These are here just to illustrate the dynamic reader.
-if(url.exists("http://www.omegahat.net/Rcartogram/demo.jpg")) withAutoprint({
+if(url.exists("https://www.omegahat.net/Rcartogram/demo.jpg")) withAutoprint({
   header = dynCurlReader()
-  curlPerform(url = "http://www.omegahat.net/Rcartogram/demo.jpg",
+  curlPerform(url = "https://www.omegahat.net/Rcartogram/demo.jpg",
               headerfunction = header$update, curl = header$curl())
   class( header$value() )
   length( header$value() )
 })
 
-if(url.exists("http://www.omegahat.net/dd.gz")) withAutoprint({
+if(url.exists("https://www.omegahat.net/dd.gz")) withAutoprint({
      # gzip example.
   header = dynCurlReader()
-  curlPerform(url = "http://www.omegahat.net/dd.gz",
+  curlPerform(url = "https://www.omegahat.net/dd.gz",
               headerfunction = header$update, curl = header$curl())
   class( header$value() )
   length( header$value() )
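
As the comment preserved in this hunk says, each dynCurlReader() example can also be done with getURLContent(), which inspects the Content-Type header and returns raw data for binary payloads. A sketch of that simpler route, assuming the demo image is still served:

    library(RCurl)
    u <- "https://www.omegahat.net/Rcartogram/demo.jpg"
    if (url.exists(u)) {
      img <- getURLContent(u)
      class(img)                  # raw vector for a binary content type
      attr(img, "Content-Type")   # e.g. "image/jpeg"
    }
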
diff -pruN 1.98-1.7+dfsg-1/man/getBinaryURL.Rd 1.98-1.8+dfsg-1/man/getBinaryURL.Rd
--- 1.98-1.7+dfsg-1/man/getBinaryURL.Rd	2022-06-08 19:08:34.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/getBinaryURL.Rd	2022-07-30 14:40:15.000000000 +0000
@@ -56,7 +56,7 @@ getBinaryURL(url, ..., .opts = list(), c
   \code{\link{memDecompress}}
 }
 \examples{
-  u = "http://www.omegahat.net/RCurl/data.gz"
+  u = "https://www.omegahat.net/RCurl/data.gz"
 
 if(url.exists(u)) withAutoprint({
 
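
The gzipped payload fetched in this example comes back as a raw vector; pairing getBinaryURL() with base R's memDecompress() (listed in the page's \seealso) recovers the text. A minimal sketch, same reachability assumptions:

    library(RCurl)
    u <- "https://www.omegahat.net/RCurl/data.gz"
    if (url.exists(u)) {
      x <- getBinaryURL(u)                            # raw bytes of data.gz
      txt <- memDecompress(x, type = "gzip", asChar = TRUE)
      substring(txt, 1, 100)
    }
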
diff -pruN 1.98-1.7+dfsg-1/man/getCurlHandle.Rd 1.98-1.8+dfsg-1/man/getCurlHandle.Rd
--- 1.98-1.7+dfsg-1/man/getCurlHandle.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/getCurlHandle.Rd	2022-07-30 14:40:26.000000000 +0000
@@ -69,10 +69,10 @@ getCurlMultiHandle(..., .handles = list(
                               followlocation = TRUE,
                               autoreferer = TRUE,
                               nosignal = TRUE))
-  if(url.exists("http://www.omegahat.net/RCurl")) {
-     x = getURL("http://www.omegahat.net/RCurl")
+  if(url.exists("https://www.omegahat.net/RCurl")) {
+     x = getURL("https://www.omegahat.net/RCurl")
         # here we override one of these.
-     x = getURL("http://www.omegahat.net/RCurl", verbose = FALSE)
+     x = getURL("https://www.omegahat.net/RCurl", verbose = FALSE)
   }
 }
 \keyword{IO}
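
The point of this example is handle reuse: defaults passed to getCurlHandle() apply to every request made with that handle and can be overridden per call, as the retargeted hunk shows. Briefly, under the same assumptions:

    library(RCurl)
    curl <- getCurlHandle(followlocation = TRUE, verbose = TRUE)
    if (url.exists("https://www.omegahat.net/RCurl")) {
      x <- getURL("https://www.omegahat.net/RCurl", curl = curl)
      # override one stored default for this call only
      x <- getURL("https://www.omegahat.net/RCurl", curl = curl, verbose = FALSE)
    }
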
diff -pruN 1.98-1.7+dfsg-1/man/getCurlInfo.Rd 1.98-1.8+dfsg-1/man/getCurlInfo.Rd
--- 1.98-1.7+dfsg-1/man/getCurlInfo.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/getCurlInfo.Rd	2022-07-30 14:40:34.000000000 +0000
@@ -49,9 +49,9 @@ getCurlInfoConstants()
   \code{\link{getCurlHandle}}
 }
 \examples{
- if(url.exists("http://www.omegahat.net/RCurl/index.html")) withAutoprint({
+ if(url.exists("https://www.omegahat.net/RCurl/index.html")) withAutoprint({
     curl = getCurlHandle()
-    txt = getURL("http://www.omegahat.net/RCurl/index.html", curl = curl)
+    txt = getURL("https://www.omegahat.net/RCurl/index.html", curl = curl)
     getCurlInfo(curl)
     rm(curl)  # release the curl!
  })
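
getCurlInfo() reads per-transfer statistics off the handle after a request completes, which is why the example keeps the handle around instead of discarding it. A small sketch (same assumptions; field names follow getCurlInfoConstants()):

    library(RCurl)
    if (url.exists("https://www.omegahat.net/RCurl/index.html")) {
      curl <- getCurlHandle()
      txt <- getURL("https://www.omegahat.net/RCurl/index.html", curl = curl)
      info <- getCurlInfo(curl)
      info$response.code   # HTTP status of the last transfer
      info$total.time      # elapsed seconds
    }
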
diff -pruN 1.98-1.7+dfsg-1/man/getFormParams.Rd 1.98-1.8+dfsg-1/man/getFormParams.Rd
--- 1.98-1.7+dfsg-1/man/getFormParams.Rd	2020-01-15 16:02:43.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/getFormParams.Rd	2022-07-30 14:40:43.000000000 +0000
@@ -28,9 +28,9 @@ Duncan Temple Lang
 }
 
 \examples{
-if(url.exists("http://www.omegahat.net/foo/bob.R")) withAutoPrint({
+if(url.exists("https://www.omegahat.net/foo/bob.R")) withAutoPrint({
 
-  getFormParams("http://www.omegahat.net/foo/bob.R?xyz=1&abc=verylong")
+  getFormParams("https://www.omegahat.net/foo/bob.R?xyz=1&abc=verylong")
 
   getFormParams("xyz=1&abc=verylong")
   getFormParams("xyz=1&abc=&on=true")
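
getFormParams() itself is a pure string operation, so it can be exercised with no network access at all. (The page's withAutoPrint appears to be a typo for base R's withAutoprint, lower-case p; the sketch below simply calls the functions directly.)

    library(RCurl)
    getFormParams("xyz=1&abc=verylong")
    #       xyz        abc
    #       "1" "verylong"
    getFormParams("https://www.omegahat.net/foo/bob.R?xyz=1&abc=2")  # full URL ok
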
diff -pruN 1.98-1.7+dfsg-1/man/getURIAsynchronous.Rd 1.98-1.8+dfsg-1/man/getURIAsynchronous.Rd
--- 1.98-1.7+dfsg-1/man/getURIAsynchronous.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/getURIAsynchronous.Rd	2022-07-30 14:37:34.000000000 +0000
@@ -17,7 +17,7 @@
   and so increases the consumption of CPU cycles.
   On the other hand, there is a potentially large saving of
   time when one considers total time to download.
-  See \url{http://www.omegahat.net/RCurl/concurrent.xml}
+  See \url{https://www.omegahat.net/RCurl/concurrent.xml}
   for more details.  This is a common trade-off that arises in
   concurrent/parallel/asynchronous computing.
    
@@ -110,8 +110,8 @@ getURIAsynchronous(url, ..., .opts = lis
   \code{\link{curlMultiPerform}}  
 }
 \examples{
-  uris = c("http://www.omegahat.net/RCurl/index.html",
-           "http://www.omegahat.net/RCurl/philosophy.xml")
+  uris = c("https://www.omegahat.net/RCurl/index.html",
+           "https://www.omegahat.net/RCurl/philosophy.xml")
   txt = getURIAsynchronous(uris)
   names(txt)
   nchar(txt)
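
getURIAsynchronous() multiplexes the transfers rather than fetching serially, per the trade-off discussion at the top of this file. A minimal sketch, assuming both pages are reachable:

    library(RCurl)
    uris <- c("https://www.omegahat.net/RCurl/index.html",
              "https://www.omegahat.net/RCurl/philosophy.xml")
    txt <- getURIAsynchronous(uris)  # one event loop drives both downloads
    nchar(txt)                       # results are named by URI
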
diff -pruN 1.98-1.7+dfsg-1/man/getURL.Rd 1.98-1.8+dfsg-1/man/getURL.Rd
--- 1.98-1.7+dfsg-1/man/getURL.Rd	2021-08-17 14:12:35.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/getURL.Rd	2022-07-30 14:36:41.000000000 +0000
@@ -134,11 +134,11 @@ getURLContent(url, ..., curl = getCurlHa
 }
 \examples{
 
-  omegahatExists = url.exists("http://www.omegahat.net")
+  omegahatExists = url.exists("https://www.omegahat.net")
 
    # Regular HTTP
   if(omegahatExists && requireNamespace("XML", quietly = TRUE)) withAutoprint({
-     txt = getURL("http://www.omegahat.net/RCurl/")
+     txt = getURL("https://www.omegahat.net/RCurl/")
      ## Then we could parse the result.
      XML::htmlTreeParse(txt, asText = TRUE)
   })
@@ -161,8 +161,8 @@ getURLContent(url, ..., curl = getCurlHa
   if(interactive() && omegahatExists) {
      curl = getCurlHandle()
      pages = list()
-     for(u in c("http://www.omegahat.net/RCurl/index.html",
-                "http://www.omegahat.net/RGtk/index.html")) {
+     for(u in c("https://www.omegahat.net/RCurl/index.html",
+                "https://www.omegahat.net/RGtk/index.html")) {
          pages[[u]] = getURL(u, curl = curl)
      }
   }
@@ -171,7 +171,7 @@ getURLContent(url, ..., curl = getCurlHa
     # Set additional fields in the header of the HTTP request.
     # verbose option allows us to see that they were included.
   if(omegahatExists)
-     getURL("http://www.omegahat.net", httpheader = c(Accept = "text/html", 
+     getURL("https://www.omegahat.net", httpheader = c(Accept = "text/html", 
                                                       MyField = "Duncan"), 
                verbose = TRUE)
 
@@ -183,7 +183,7 @@ getURLContent(url, ..., curl = getCurlHa
     # status line)
   if(omegahatExists) withAutoprint({
      h = basicTextGatherer()
-     txt = getURL("http://www.omegahat.net/RCurl/index.html",
+     txt = getURL("https://www.omegahat.net/RCurl/index.html",
                   header= TRUE, headerfunction = h$update, 
                   httpheader = c(Accept="text/html", Test=1), verbose = TRUE) 
      print(paste(h$value(NULL)[-1], collapse=""))
@@ -196,12 +196,12 @@ getURLContent(url, ..., curl = getCurlHa
 
    # Test the passwords.
   if(omegahatExists) withAutoprint({
-     x = getURL("http://www.omegahat.net/RCurl/testPassword/index.html",  userpwd = "bob:duncantl")
+     x = getURL("https://www.omegahat.net/RCurl/testPassword/index.html",  userpwd = "bob:duncantl")
 
        # Catch an error because no authorization
        # We catch the generic HTTPError, but we could catch the more specific "Unauthorized" error
        # type.
-      x = tryCatch(getURLContent("http://www.omegahat.net/RCurl/testPassword/index.html"),
+      x = tryCatch(getURLContent("https://www.omegahat.net/RCurl/testPassword/index.html"),
                     HTTPError = function(e) {
                                    cat("HTTP error: ", e$message, "\n")
                                 })
@@ -210,7 +210,7 @@ getURLContent(url, ..., curl = getCurlHa
 \dontrun{
   #  Needs specific information from the cookie file on a per user basis
   #  with a registration to the NY times.
-  x = getURL("http://www.nytimes.com",
+  x = getURL("https://www.nytimes.com",
                  header = TRUE, verbose = TRUE,
                  cookiefile = "/home/duncan/Rcookies",
                  netrc = TRUE,
@@ -221,7 +221,7 @@ getURLContent(url, ..., curl = getCurlHa
 
    if(interactive() && omegahatExists) {
        d = debugGatherer()
-       x = getURL("http://www.omegahat.net", debugfunction = d$update, verbose = TRUE)
+       x = getURL("https://www.omegahat.net", debugfunction = d$update, verbose = TRUE)
        d$value()
    }
 
@@ -230,11 +230,11 @@ getURLContent(url, ..., curl = getCurlHa
 
    if(interactive() && omegahatExists) {
       opts = curlOptions(header = TRUE, userpwd = "bob:duncantl", netrc = TRUE)
-      getURL("http://www.omegahat.net/RCurl/testPassword/index.html", verbose = TRUE, .opts = opts)
+      getURL("https://www.omegahat.net/RCurl/testPassword/index.html", verbose = TRUE, .opts = opts)
 
          # Using options in the CURL handle.
       h = getCurlHandle(header = TRUE, userpwd = "bob:duncantl", netrc = TRUE)
-      getURL("http://www.omegahat.net/RCurl/testPassword/index.html", verbose = TRUE, curl = h)
+      getURL("https://www.omegahat.net/RCurl/testPassword/index.html", verbose = TRUE, curl = h)
    }
 
 
@@ -242,15 +242,15 @@ getURLContent(url, ..., curl = getCurlHa
    # Use a C routine as the reader. Currently gives a warning.
   if(interactive() && omegahatExists) {
      routine = getNativeSymbolInfo("R_internalWriteTest", PACKAGE = "RCurl")$address
-     getURL("http://www.omegahat.net/RCurl/index.html", writefunction = routine)
+     getURL("https://www.omegahat.net/RCurl/index.html", writefunction = routine)
   }
 
 
 
   # Example
   if(interactive() && omegahatExists) {
-     uris = c("http://www.omegahat.net/RCurl/index.html",
-              "http://www.omegahat.net/RCurl/philosophy.xml")
+     uris = c("https://www.omegahat.net/RCurl/index.html",
+              "https://www.omegahat.net/RCurl/philosophy.xml")
      txt = getURI(uris)
      names(txt)
      nchar(txt)
@@ -267,20 +267,20 @@ getURLContent(url, ..., curl = getCurlHa
 
 
          # getURLContent() for text and binary
-     x = getURLContent("http://www.omegahat.net/RCurl/index.html")
+     x = getURLContent("https://www.omegahat.net/RCurl/index.html")
      class(x)
 
-     x = getURLContent("http://www.omegahat.net/RCurl/data.gz")
+     x = getURLContent("https://www.omegahat.net/RCurl/data.gz")
      class(x)
      attr(x, "Content-Type")
 
-     x = getURLContent("http://www.omegahat.net/Rcartogram/demo.jpg")
+     x = getURLContent("https://www.omegahat.net/Rcartogram/demo.jpg")
      class(x)
      attr(x, "Content-Type")
 
 
      curl = getCurlHandle()
-     dd = getURLContent("http://www.omegahat.net/RJSONIO/RJSONIO.pdf",
+     dd = getURLContent("https://www.omegahat.net/RJSONIO/RJSONIO.pdf",
                         curl = curl,
                         header = dynCurlReader(curl, binary = TRUE,
                                            value = function(x) {
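
Of the many call sites retargeted in getURL.Rd, the tryCatch branch is worth isolating: RCurl signals HTTP failures as typed conditions, so callers can trap the generic HTTPError (or a more specific class such as Unauthorized, as the example notes). A compact sketch, assuming the password-protected test page still answers 401 without credentials:

    library(RCurl)
    u <- "https://www.omegahat.net/RCurl/testPassword/index.html"
    x <- tryCatch(getURLContent(u),              # no userpwd supplied
                  HTTPError = function(e)
                      cat("HTTP error:", e$message, "\n"))
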
diff -pruN 1.98-1.7+dfsg-1/man/MultiCURLHandle-class.Rd 1.98-1.8+dfsg-1/man/MultiCURLHandle-class.Rd
--- 1.98-1.7+dfsg-1/man/MultiCURLHandle-class.Rd	2021-03-16 14:58:50.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/MultiCURLHandle-class.Rd	2022-07-30 14:37:50.000000000 +0000
@@ -51,7 +51,7 @@
 }
 
 \references{Curl homepage \url{https://curl.se/}
-  \url{http://www.omegahat.net/RCurl/}
+  \url{https://www.omegahat.net/RCurl/}
 }
 \author{Duncan Temple Lang}
 \seealso{
diff -pruN 1.98-1.7+dfsg-1/man/url.exists.Rd 1.98-1.8+dfsg-1/man/url.exists.Rd
--- 1.98-1.7+dfsg-1/man/url.exists.Rd	2020-01-16 11:46:25.000000000 +0000
+++ 1.98-1.8+dfsg-1/man/url.exists.Rd	2022-07-30 14:37:06.000000000 +0000
@@ -47,7 +47,7 @@ Duncan Temple Lang
  \code{\link{curlPerform}}
 }
 \examples{
- url.exists("http://www.omegahat.net/RCurl")
- try(url.exists("http://www.omegahat.net/RCurl-xxx"))
+ url.exists("https://www.omegahat.net/RCurl")
+ try(url.exists("https://www.omegahat.net/RCurl-xxx"))
 }
 
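
url.exists() makes a quick request for just the response header and returns a logical, which is what makes it usable as the guard in all of the examples above; a lookup that fails at the transport level can instead raise an error, hence the try() in the man page. A sketch under the same assumptions:

    library(RCurl)
    if (url.exists("https://www.omegahat.net/RCurl"))            # TRUE if reachable
      page <- getURL("https://www.omegahat.net/RCurl")
    ok <- try(url.exists("https://www.omegahat.net/RCurl-xxx"))  # FALSE, or a
                                                                 # try-error offline
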
diff -pruN 1.98-1.7+dfsg-1/MD5 1.98-1.8+dfsg-1/MD5
--- 1.98-1.7+dfsg-1/MD5	2022-06-09 09:26:30.000000000 +0000
+++ 1.98-1.8+dfsg-1/MD5	2022-07-30 14:52:51.000000000 +0000
@@ -1,4 +1,4 @@
-8e7f5bbf2e85db91d19861d8529e9b96 *DESCRIPTION
+9359e4534ecb72ab67fd4684aa5a1357 *DESCRIPTION
 b7e3b7d7512e387c2e3256a7d7bc5b15 *LICENSE
 56eb3e6bd8cdb962b7fda019a2b605e4 *NAMESPACE
 2e5893f298d1c69b4fcb16377c41a905 *R/aclassesEnums.R
@@ -35,7 +35,7 @@ cabb7f48acb3dd85a0acbcd4729d9ddd *README
 0e9496f447b280164785da091956fa1a *configure
 f393989017aa53ff6890a7f610d99f3c *configure.ac
 d41d8cd98f00b204e9800998ecf8427e *configure.win
-33b7e9c072d752f93a34675e82ed2578 *data/mimeTypeExtensions.rda
+a1f6addec97ef547de92290f014f4deb *data/mimeTypeExtensions.rda
 3ff9d631b3ee4b545e9904b0e805f6d9 *inst/CurlSSL/boost.pem
 83e2ea55aeb9fc9578554decfb0ec5f6 *inst/CurlSSL/ca-bundle.crt
 c726ae88fd600aa26df1d30f42b51fec *inst/CurlSSL/cacert.pem
@@ -56,10 +56,10 @@ dc7cd605de74c70c165064b5f5fb672f *inst/C
 68f704b46294b46e68ac1257824f956c *inst/doc/cookies.xml
 fbb29df6a29a38673037bb5a9fe3fc58 *inst/doc/fileUploads.xml
 0aab7ffbdf2502d064984e704a1bc016 *inst/doc/getURL.html
-a96a71cc589f553a87377321231f3095 *inst/doc/philosophy.html
+59c3d2ab2aca0af5ac7e2170ae48bec7 *inst/doc/philosophy.html
 98c0f57cf1674ddaffbf8716af940645 *inst/doc/philosophy.xml
 eb2a4f621cfa614df5959b616045ecce *inst/doc/withCookies.Rdb
-fb44b1f6945fb40ba4b81b936003806e *inst/doc/withCookies.html
+1b71285053ffc37e9457cdcc4deb9d8c *inst/doc/withCookies.html
 eec77bb996fa92609f23413fcf51c9f3 *inst/enums/Renums.c
 6c6df7d819f306fcd0fb3319a752524f *inst/etc/README
 308c55e7a7d56a27fcfb3983169c3491 *inst/etc/ca-bundle.crt
@@ -107,35 +107,35 @@ b972f3825d96c0acd67be106540bb25d *inst/e
 ef3fe966358c63dca16977553be13c47 *man/CURLHandle-class.Rd
 6fdb8b7a6bcd7194bdb0c54b772fae0b *man/CurlFeatureBits.Rd
 74eeeecf80d17bdba23c91fe8e7f6296 *man/HTTP_VERSION_1_0.Rd
-465fc418ccc4739582a7a9a1c8ca57f3 *man/MultiCURLHandle-class.Rd
+a8648421f0410e936efd1fb1f60a3e02 *man/MultiCURLHandle-class.Rd
 a01d37a6d891d549afc86a6418440a1b *man/RCurl-internal.Rd
 e56877e0dca09c2d57ed397f0fa43a14 *man/base64.Rd
-1b4a86d54818cf65807bb229cf1629b0 *man/basicHeaderGatherer.Rd
-fe1931c41a6bf09f3f49a98fb76bdcd5 *man/basicTextGatherer.Rd
-92c2496cf068ddd4af927fce4af67c0e *man/binaryBuffer.Rd
-287cd1e458db94de0d7ed622077a81be *man/chunkToLineReader.Rd
+72f6f7e69de5c5e9795d6693eddd892d *man/basicHeaderGatherer.Rd
+ef4f7683b2703a57b740f7725c30008a *man/basicTextGatherer.Rd
+109b59adf87470f3c7797f44688ca931 *man/binaryBuffer.Rd
+cef6431f128b66e7da40ef1e0cf430c1 *man/chunkToLineReader.Rd
 b8ca83760e37e80f1f1db526d24c37ac *man/clone.Rd
 70a29e3de2b6cbe6c3478660301bf228 *man/complete.Rd
 af109e94138009dc642fea350333e8fb *man/curlError.Rd
 6d95af74ea262449fb4c8b22c3e1db3e *man/curlEscape.Rd
 eca87a5a639f5c05d250f7bdbdd4ddd1 *man/curlGlobalInit.Rd
 1addae1cd78dc7f033bd689afcde1d1a *man/curlOptions.Rd
-163b594af8a732ef74696b4a6aa9bb11 *man/curlPerform.Rd
-6726556bec30d0389cd22a236dedea5a *man/curlSetOpt.Rd
+7a780a3d7a71734124c756cdada1d6cb *man/curlPerform.Rd
+4815bed5737531c0b15ab9eb2b77d62b *man/curlSetOpt.Rd
 16d4816cac5ebb6c0214bb53ad9a039f *man/curlVersion.Rd
-4988fc48230baef8d91366e34c542218 *man/dynCurlReader.Rd
+8f7f1344a9d3495dfab35f4d0909b9f0 *man/dynCurlReader.Rd
 c764ee32849c47119824f6ac8d592a31 *man/enums.Rd
 dd598387b97152565122014af72171b7 *man/fileUpload.Rd
 929db5b50da92d52507498985c3c622e *man/findHTTPHeaderEncoding.Rd
 0ac4db64fa8c0b246f04c41900ec458a *man/ftpUpload.Rd
-ab56e2b2fe99fa3963b23b3b6b671ad0 *man/getBinaryURL.Rd
+6cc3a699190f24d38a4723e4a45af135 *man/getBinaryURL.Rd
 14fb10c4d0e001b2695c500e7aa652e5 *man/getBitIndicators.Rd
 9ab011d7f67c1671d89826bea1183d56 *man/getCurlErrorClassNames.Rd
-136be8674c7e39d1fcd0b010a7ea7ac7 *man/getCurlHandle.Rd
-29b1f7a1f8abb034cd8dc3c46e06ec17 *man/getCurlInfo.Rd
-5ea3928cc2e827a85bf7fa2ea7ae71e4 *man/getFormParams.Rd
-eade942e9b68d23d24eba158d1c95dc6 *man/getURIAsynchronous.Rd
-9f4b473e37407fd646155cde1dcbb562 *man/getURL.Rd
+c476c300fa1b7976f29cabe73a936fea *man/getCurlHandle.Rd
+b2ba5e76bb1ecca92d9f54b70db00178 *man/getCurlInfo.Rd
+9824c8c8a38a0cb8f74f0a11e7075fc7 *man/getFormParams.Rd
+959e80b76d100e5dbbac3c61ebecee1b *man/getURIAsynchronous.Rd
+c4acbd526ca759d823d83ca4727ac115 *man/getURL.Rd
 36e5e8a64819bb5c1e04d7555db13988 *man/guessMIMEType.Rd
 a5ecefeeffec3d7e04b259633adc337e *man/httpPUT.Rd
 c3a3bf354a3d07a647d4ed460c461792 *man/merge.list.Rd
@@ -143,7 +143,7 @@ c3a3bf354a3d07a647d4ed460c461792 *man/me
 c77d502baa3ac5e8113d05e2d29c81a9 *man/postForm.Rd
 8c58acfb71607583f9bb2615bc54c2c3 *man/reset.Rd
 b2b50d815780ba72211ec90f5a268822 *man/scp.Rd
-d15d64d9f1d5b439f47dd09fe0413c2c *man/url.exists.Rd
+eaaa081e04e728ed58c71efb35613f6f *man/url.exists.Rd
 dbd7d5f12f5e4e7648f03980e198a64f *src/CURLINFOTable.h
 f34ce779bd78b3b384dad3ef274e7fa2 *src/CURLOptTable.h
 12fe732c6d51fdab1a4a1c3bc9cbedb9 *src/CurlErrorEnums.h
