2011-08-20 20:11:42 +00:00
|
|
|
{- Url downloading.
|
2011-08-17 00:49:04 +00:00
|
|
|
-
|
2018-04-04 19:15:12 +00:00
|
|
|
- Copyright 2011-2018 Joey Hess <id@joeyh.name>
|
2011-08-17 00:49:04 +00:00
|
|
|
-
|
2014-05-10 14:01:27 +00:00
|
|
|
- License: BSD-2-clause
|
2011-08-17 00:49:04 +00:00
|
|
|
-}
|
|
|
|
|
2012-10-10 15:26:30 +00:00
|
|
|
{-# LANGUAGE CPP #-}
|
2014-08-15 22:02:17 +00:00
|
|
|
{-# LANGUAGE OverloadedStrings #-}
|
2014-08-17 19:39:01 +00:00
|
|
|
{-# LANGUAGE RankNTypes #-}
|
2015-05-10 19:37:55 +00:00
|
|
|
{-# LANGUAGE FlexibleContexts #-}
|
2012-10-10 15:26:30 +00:00
|
|
|
|
2011-08-20 20:11:42 +00:00
|
|
|
module Utility.Url (
|
2018-04-04 19:15:12 +00:00
|
|
|
newManager,
|
2015-10-15 14:34:19 +00:00
|
|
|
managerSettings,
|
2012-01-02 18:20:20 +00:00
|
|
|
URLString,
|
2013-09-28 18:35:21 +00:00
|
|
|
UserAgent,
|
2018-04-04 19:15:12 +00:00
|
|
|
UrlOptions(..),
|
|
|
|
defUrlOptions,
|
2014-08-15 21:47:21 +00:00
|
|
|
mkUrlOptions,
|
2012-02-10 23:17:41 +00:00
|
|
|
check,
|
2013-10-11 17:05:00 +00:00
|
|
|
checkBoth,
|
2011-08-17 00:49:04 +00:00
|
|
|
exists,
|
2015-01-22 18:52:52 +00:00
|
|
|
UrlInfo(..),
|
|
|
|
getUrlInfo,
|
2015-08-19 16:24:55 +00:00
|
|
|
assumeUrlExists,
|
2011-08-17 00:49:04 +00:00
|
|
|
download,
|
2013-11-25 03:44:30 +00:00
|
|
|
downloadQuiet,
|
2017-12-06 17:16:06 +00:00
|
|
|
downloadPartial,
|
2016-07-12 20:30:36 +00:00
|
|
|
parseURIRelaxed,
|
|
|
|
matchStatusCodeException,
|
2017-09-12 19:13:42 +00:00
|
|
|
matchHttpExceptionContent,
|
2011-08-17 00:49:04 +00:00
|
|
|
) where
|
|
|
|
|
2012-03-16 00:39:25 +00:00
|
|
|
import Common
|
2017-12-31 20:08:31 +00:00
|
|
|
import Utility.Tmp.Dir
|
2017-12-14 16:46:57 +00:00
|
|
|
import qualified BuildInfo
|
2015-05-05 17:53:06 +00:00
|
|
|
|
2011-08-17 00:49:04 +00:00
|
|
|
import Network.URI
|
2014-08-15 21:17:19 +00:00
|
|
|
import Network.HTTP.Types
|
|
|
|
import qualified Data.CaseInsensitive as CI
|
2014-08-15 22:02:17 +00:00
|
|
|
import qualified Data.ByteString as B
|
2014-08-15 21:17:19 +00:00
|
|
|
import qualified Data.ByteString.UTF8 as B8
|
2017-12-06 17:16:06 +00:00
|
|
|
import qualified Data.ByteString.Lazy as L
|
2015-10-01 17:47:54 +00:00
|
|
|
import Control.Monad.Trans.Resource
|
2018-04-04 19:15:12 +00:00
|
|
|
import Network.HTTP.Conduit
|
2017-12-06 17:16:06 +00:00
|
|
|
import Network.HTTP.Client (brRead, withResponse)
|
2015-10-01 17:47:54 +00:00
|
|
|
|
2017-08-17 15:00:48 +00:00
|
|
|
#if ! MIN_VERSION_http_client(0,5,0)
-- Compatibility shim: http-client 0.5 added responseTimeoutNone.
-- Older versions represent the manager response timeout as a
-- Maybe Int, where Nothing disables the timeout.
responseTimeoutNone :: Maybe Int
responseTimeoutNone = Nothing
#endif
|
|
|
|
|
2015-10-15 14:34:19 +00:00
|
|
|
-- | Settings used for http Managers, with the response timeout
-- disabled (responseTimeoutNone). TLS-capable settings are used
-- when http-conduit is new enough to provide tlsManagerSettings.
managerSettings :: ManagerSettings
#if MIN_VERSION_http_conduit(2,1,7)
managerSettings = tlsManagerSettings
#else
managerSettings = conduitManagerSettings
#endif
    { managerResponseTimeout = responseTimeoutNone }
|
2015-10-15 14:34:19 +00:00
|
|
|
|
2011-08-17 00:49:04 +00:00
|
|
|
-- | An url, as a String.
type URLString = String

-- | Headers to send with a request, each as a "Header: value" string.
type Headers = [String]

-- | A user agent string.
type UserAgent = String
|
|
|
|
|
2014-08-15 21:47:21 +00:00
|
|
|
-- | Options controlling how urls are accessed and downloaded.
data UrlOptions = UrlOptions
    { userAgent :: Maybe UserAgent
      -- ^ user agent to send (also passed to curl/wget via --user-agent)
    , reqHeaders :: Headers
      -- ^ extra headers, as "Header: value" strings
    , reqParams :: [CommandParam]
      -- ^ extra command-line parameters passed to curl/wget
    , applyRequest :: Request -> Request
      -- ^ arbitrary adjustment applied to each http-conduit Request
    , httpManager :: Manager
      -- ^ connection manager used for http-conduit requests
    }
|
|
|
|
|
2018-04-04 19:15:12 +00:00
|
|
|
-- | Default options: no user agent, no extra headers or parameters,
-- requests left unmodified, and a freshly constructed Manager
-- using managerSettings.
defUrlOptions :: IO UrlOptions
defUrlOptions = do
    manager <- newManager managerSettings
    return (UrlOptions Nothing [] [] id manager)
|
|
|
|
|
|
|
|
-- | Builds UrlOptions from a default user agent, headers given as
-- "Header: value" strings, extra command-line parameters, and a Manager.
--
-- If a User-Agent header appears among the headers, it overrides the
-- default user agent.
mkUrlOptions :: Maybe UserAgent -> Headers -> [CommandParam] -> Manager -> UrlOptions
mkUrlOptions defuseragent reqheaders reqparams manager =
    UrlOptions useragent reqheaders reqparams applyrequest manager
  where
    -- Appends the parsed headers (including the user agent header)
    -- to each Request's headers.
    applyrequest = \r -> r { requestHeaders = requestHeaders r ++ addedheaders }
    addedheaders = uaheader ++ otherheaders
    -- The first User-Agent found in the headers wins over the default.
    useragent = maybe defuseragent (Just . B8.toString . snd)
        (headMaybe uafromheaders)
    uaheader = case useragent of
        Nothing -> []
        Just ua -> [(hUserAgent, B8.fromString ua)]
    -- Split any User-Agent headers off from the rest.
    (uafromheaders, otherheaders) = partition (\(h, _) -> h == hUserAgent)
        (map toheader reqheaders)
    -- Parses "Header: value", stripping a single space after the colon
    -- when present.
    toheader s =
        let (h, v) = separate (== ':') s
            h' = CI.mk (B8.fromString h)
        in case v of
            (' ':v') -> (h', B8.fromString v')
            _ -> (h', B8.fromString v)
|
|
|
|
|
|
|
|
-- | Appends the configured user agent, when there is one, to a
-- command line. --user-agent works for both wget and curl commands.
addUserAgent :: UrlOptions -> [CommandParam] -> [CommandParam]
addUserAgent uo ps = maybe ps appendua (userAgent uo)
  where
    appendua ua = ps ++ [Param "--user-agent", Param ua]
|
|
|
|
|
2012-02-10 23:17:41 +00:00
|
|
|
{- Checks that an url exists and could be successfully downloaded,
 - also checking that its size, if available, matches a specified size. -}
checkBoth :: URLString -> Maybe Integer -> UrlOptions -> IO Bool
checkBoth url expected_size uo =
    uncurry (&&) <$> check url expected_size uo
|
2016-12-28 04:17:36 +00:00
|
|
|
|
2014-02-25 02:00:25 +00:00
|
|
|
-- | Checks whether an url exists, and whether its size, when known,
-- matches the specified expected size. When either the actual or the
-- expected size is unknown, the size is treated as matching.
check :: URLString -> Maybe Integer -> UrlOptions -> IO (Bool, Bool)
check url expected_size uo = examine <$> getUrlInfo url uo
  where
    examine (UrlInfo False _ _) = (False, False)
    examine (UrlInfo True havesize _) =
        (True, maybe True matches expected_size)
      where
        matches want = maybe True (== want) havesize
|
2012-02-10 23:17:41 +00:00
|
|
|
|
2015-01-22 18:52:52 +00:00
|
|
|
-- | Checks whether an url exists.
exists :: URLString -> UrlOptions -> IO Bool
exists url uo = fmap urlExists (getUrlInfo url uo)
|
|
|
|
|
|
|
|
-- | Information gathered about an url.
data UrlInfo = UrlInfo
    { urlExists :: Bool
    , urlSize :: Maybe Integer
      -- ^ size, when it could be determined (eg from Content-Length)
    , urlSuggestedFile :: Maybe FilePath
      -- ^ filename suggested by the server (eg from Content-Disposition)
    }
    deriving (Show)
|
2015-01-22 18:52:52 +00:00
|
|
|
|
2015-08-19 16:24:55 +00:00
|
|
|
-- | Used when an url is assumed to exist, but nothing else is known
-- about it.
assumeUrlExists :: UrlInfo
assumeUrlExists = UrlInfo True Nothing Nothing
|
|
|
|
|
2012-02-10 23:17:41 +00:00
|
|
|
{- Checks that an url exists and could be successfully downloaded,
 - also returning its size and suggested filename if available. -}
getUrlInfo :: URLString -> UrlOptions -> IO UrlInfo
getUrlInfo url uo = case parseURIRelaxed url of
    Just u -> case parseUrlConduit (show u) of
        Just req -> catchJust
            -- When http redirects to a protocol which
            -- conduit does not support, it will throw
            -- a StatusCodeException with found302.
            (matchStatusCodeException (== found302))
            (existsconduit req)
            (const (existscurl u))
            `catchNonAsync` (const dne)
        -- http-conduit does not support file:, ftp:, etc urls,
        -- so fall back to reading files and using curl.
        Nothing
            | uriScheme u == "file:" -> do
                -- Stat the local file rather than making a request.
                let f = unEscapeString (uriPath u)
                s <- catchMaybeIO $ getFileStatus f
                case s of
                    Just stat -> do
                        sz <- getFileSize' f stat
                        found (Just sz) Nothing
                    Nothing -> dne
            | BuildInfo.curl -> existscurl u
            | otherwise -> dne
    Nothing -> dne
  where
    dne = return $ UrlInfo False Nothing Nothing
    found sz f = return $ UrlInfo True sz f

    -- HEAD request following redirects; -w makes curl print the
    -- http status code as the final line of output.
    curlparams = addUserAgent uo $
        [ Param "-s"
        , Param "--head"
        , Param "-L", Param url
        , Param "-w", Param "%{http_code}"
        ] ++ concatMap (\h -> [Param "-H", Param h]) (reqHeaders uo) ++ (reqParams uo)

    -- Extracts the size from the last Content-Length line of curl's
    -- header output (the last one is used since curl may have
    -- followed redirects).
    extractlencurl s = case lastMaybe $ filter ("Content-Length:" `isPrefixOf`) (lines s) of
        Just l -> case lastMaybe $ words l of
            Just sz -> readish sz
            _ -> Nothing
        _ -> Nothing

    -- Extracts the size from a conduit response's Content-Length header.
    extractlen = readish . B8.toString <=< firstheader hContentLength

    -- Extracts the suggested filename from a conduit response's
    -- Content-Disposition header.
    extractfilename = contentDispositionFilename . B8.toString
        <=< firstheader hContentDisposition

    firstheader h = headMaybe . map snd .
        filter (\p -> fst p == h) . responseHeaders

    -- Makes a HEAD request with http-conduit.
    existsconduit req = do
        let req' = headRequest (applyRequest uo req)
        runResourceT $ do
            resp <- http req' (httpManager uo)
            -- forces processing the response while
            -- within the runResourceT
            liftIO $ if responseStatus resp == ok200
                then found
                    (extractlen resp)
                    (extractfilename resp)
                else dne

    -- Runs curl and inspects its header output and final status code.
    existscurl u = do
        output <- catchDefaultIO "" $
            readProcess "curl" $ toCommand curlparams
        let len = extractlencurl output
        let good = found len Nothing
        let isftp = or
            [ "ftp" `isInfixOf` uriScheme u
            -- Check to see if http redirected to ftp.
            , "Location: ftp://" `isInfixOf` output
            ]
        case lastMaybe (lines output) of
            -- A status code starting with 2 indicates success.
            Just ('2':_:_) -> good
            -- don't try to parse ftp status codes; if curl
            -- got a length, it's good
            _ | isftp && isJust len -> good
            _ -> dne
|
|
|
|
|
2015-01-22 18:52:52 +00:00
|
|
|
-- | Parse eg: attachment; filename="fname.ext"
-- per RFC 2616, yielding the filename between the quotes.
-- Only this exact attachment form is recognized; anything else
-- yields Nothing.
contentDispositionFilename :: String -> Maybe FilePath
contentDispositionFilename s
    | hdr `isPrefixOf` s && "\"" `isSuffixOf` s =
        let quoted = drop (length hdr) s
        -- drop the closing quote
        in Just (take (length quoted - 1) quoted)
    | otherwise = Nothing
  where
    hdr = "attachment; filename=\""
|
|
|
|
|
2014-08-15 22:02:17 +00:00
|
|
|
-- | Converts a Request into a HEAD request.
headRequest :: Request -> Request
headRequest r = r
    { method = methodHead
    -- Remove the default Accept-Encoding header, to get the actual,
    -- not gzip compressed size.
    , requestHeaders = (hAcceptEncoding, B.empty)
        : filter ((/= hAcceptEncoding) . fst) (requestHeaders r)
    }
|
|
|
|
|
2017-02-20 19:59:55 +00:00
|
|
|
{- Download a perhaps large file, with auto-resume of incomplete downloads.
 -
 - Uses wget or curl program for its progress bar and resuming support.
 - Which program to use is determined at run time depending on which is
 - in path and which works best in a particular situation.
 -}
download :: URLString -> FilePath -> UrlOptions -> IO Bool
download = download' False
|
|
|
|
|
2017-02-20 19:59:55 +00:00
|
|
|
{- Like download, but with no output to stdout. -}
downloadQuiet :: URLString -> FilePath -> UrlOptions -> IO Bool
downloadQuiet = download' True
|
|
|
|
|
2014-02-25 02:00:25 +00:00
|
|
|
-- | Shared implementation of download and downloadQuiet; the Bool
-- is True to suppress output to stdout.
download' :: Bool -> URLString -> FilePath -> UrlOptions -> IO Bool
download' quiet url file uo = do
    case parseURIRelaxed url of
        Just u
            -- curl supports file: urls; wget does not.
            | uriScheme u == "file:" -> curl
            -- curl is preferred in quiet mode, because
            -- it displays http errors to stderr, while wget
            -- does not display them in quiet mode
            | quiet -> ifM (inPath "curl") (curl, wget)
            -- wget is preferred mostly because it has a better
            -- progress bar
            | otherwise -> ifM (inPath "wget") (wget , curl)
        _ -> return False
  where
    headerparams = map (\h -> Param $ "--header=" ++ h) (reqHeaders uo)
    wget = go "wget" $ headerparams ++ quietopt "-q" ++ wgetparams
    {- Regular wget needs --clobber to continue downloading an existing
     - file. On Android, busybox wget is used, which does not
     - support, or need that option.
     -
     - When the wget version is new enough, pass options for
     - a less cluttered download display. Using -nv rather than -q
     - avoids most clutter while still displaying http errors.
     -}
#ifndef __ANDROID__
    wgetparams = concat
        [ if BuildInfo.wgetunclutter && not quiet
            then [Param "-nv", Param "--show-progress"]
            else []
        , [ Param "--clobber", Param "-c", Param "-O"]
        ]
#else
    wgetparams = [Param "-c", Param "-O"]
#endif
    {- Uses the -# progress display, because the normal
     - one is very confusing when resuming, showing
     - the remainder to download as the whole file,
     - and not indicating how much percent was
     - downloaded before the resume. -}
    curl = do
        -- curl does not create destination file
        -- if the url happens to be empty, so pre-create.
        writeFile file ""
        go "curl" $ headerparams ++ quietopt "-sS" ++
            [ Param "-f"
            , Param "-L"
            , Param "-C", Param "-"
            , Param "-#"
            , Param "-o"
            ]

    {- Run wget in a temp directory because it has been buggy
     - and overwritten files in the current directory, even though
     - it was asked to write to a file elsewhere. -}
    go cmd opts = withTmpDir "downloadurl" $ \tmp -> do
        absfile <- absPath file
        let ps = addUserAgent uo $ opts++reqParams uo++[File absfile, File url]
        boolSystem' cmd ps $ \p -> p { cwd = Just tmp }

    -- Passes the given option only when in quiet mode.
    quietopt s
        | quiet = [Param s]
        | otherwise = []
|
2013-04-16 19:20:21 +00:00
|
|
|
|
2017-12-06 17:16:06 +00:00
|
|
|
{- Downloads at least the specified number of bytes from an url.
 -
 - Returns Nothing when the url cannot be parsed, the response is
 - not 200, or any non-async exception is thrown. -}
downloadPartial :: URLString -> UrlOptions -> Int -> IO (Maybe L.ByteString)
downloadPartial url uo n = case parseURIRelaxed url of
    Nothing -> return Nothing
    Just u -> go u `catchNonAsync` const (return Nothing)
  where
    go u = case parseUrlConduit (show u) of
        Nothing -> return Nothing
        Just req -> do
            let req' = applyRequest uo req
            withResponse req' (httpManager uo) $ \resp ->
                if responseStatus resp == ok200
                    then Just <$> brread n [] (responseBody resp)
                    else return Nothing

    -- could use brReadSome here, needs newer http-client dependency
    -- Accumulates chunks (in reverse order) until at least n' bytes
    -- have been read, or the body ends.
    brread n' l rb
        | n' <= 0 = return (L.fromChunks (reverse l))
        | otherwise = do
            bs <- brRead rb
            if B.null bs
                then return (L.fromChunks (reverse l))
                else brread (n' - B.length bs) (bs:l) rb
|
|
|
|
|
2013-03-11 03:00:33 +00:00
|
|
|
{- Allows for spaces and other stuff in urls, properly escaping them. -}
parseURIRelaxed :: URLString -> Maybe URI
parseURIRelaxed s = case parseURI (escapeURIString isAllowedInURI s) of
    Just u -> Just u
    -- fall back to the more expensive per-character escaping
    Nothing -> parseURIRelaxed' s
|
|
|
|
|
2017-12-06 17:16:06 +00:00
|
|
|
-- | Parses an url into a Request, yielding Nothing on parse failure.
-- (Compatibility wrapper: parseUrl was renamed to parseUrlThrow in
-- http-client 0.4.30.)
parseUrlConduit :: URLString -> Maybe Request
#if MIN_VERSION_http_client(0,4,30)
parseUrlConduit = parseUrlThrow
#else
parseUrlConduit = parseUrl
#endif
|
|
|
|
|
2015-06-14 17:54:24 +00:00
|
|
|
{- Some characters like '[' are allowed in eg, the address of
 - an uri, but cannot appear unescaped further along in the uri.
 - This handles that, expensively, by successively escaping each character
 - from the back of the url until the url parses.
 -}
parseURIRelaxed' :: URLString -> Maybe URI
parseURIRelaxed' s = go [] (reverse s)
  where
    -- back accumulates the already-escaped tail of the url;
    -- the remaining prefix is carried reversed.
    go back [] = parseURI back
    go back (c:cs) = case parseURI (escapeURIString isAllowedInURI (reverse (c:cs)) ++ back) of
        Just u -> Just u
        Nothing -> go (escapeURIChar escapemore c ++ back) cs

    -- Like isAllowedInURI, but additionally escapes '[' and ']'.
    escapemore '[' = False
    escapemore ']' = False
    escapemore c = isAllowedInURI c
|
2014-08-17 19:39:01 +00:00
|
|
|
|
|
|
|
-- | Accept-Encoding header name.
hAcceptEncoding :: CI.CI B.ByteString
hAcceptEncoding = "Accept-Encoding"
|
|
|
|
|
2015-01-22 18:52:52 +00:00
|
|
|
-- | Content-Disposition header name.
hContentDisposition :: CI.CI B.ByteString
hContentDisposition = "Content-Disposition"
|
|
|
|
|
2016-07-12 20:30:36 +00:00
|
|
|
{- Matches a StatusCodeException whose status satisfies the predicate.
 -
 - Use with eg:
 -
 - > catchJust (matchStatusCodeException (== notFound404))
 -}
#if MIN_VERSION_http_client(0,5,0)
-- http-client 0.5 wraps exception content in HttpExceptionRequest.
matchStatusCodeException :: (Status -> Bool) -> HttpException -> Maybe HttpException
matchStatusCodeException want e@(HttpExceptionRequest _ (StatusCodeException r _))
    | want (responseStatus r) = Just e
    | otherwise = Nothing
matchStatusCodeException _ _ = Nothing
#else
matchStatusCodeException :: (Status -> Bool) -> HttpException -> Maybe HttpException
matchStatusCodeException want e@(StatusCodeException s _ _)
    | want s = Just e
    | otherwise = Nothing
matchStatusCodeException _ _ = Nothing
#endif
|
2017-09-12 19:13:42 +00:00
|
|
|
|
2017-09-13 19:35:42 +00:00
|
|
|
#if MIN_VERSION_http_client(0,5,0)
|
2017-09-12 19:13:42 +00:00
|
|
|
matchHttpExceptionContent :: (HttpExceptionContent -> Bool) -> HttpException -> Maybe HttpException
|
|
|
|
matchHttpExceptionContent want e@(HttpExceptionRequest _ hec)
|
|
|
|
| want hec = Just e
|
|
|
|
| otherwise = Nothing
|
|
|
|
matchHttpExceptionContent _ _ = Nothing
|
2017-09-13 19:35:42 +00:00
|
|
|
#else
|
|
|
|
matchHttpExceptionContent :: (HttpException -> Bool) -> HttpException -> Maybe HttpException
|
|
|
|
matchHttpExceptionContent want e
|
|
|
|
| want e = Just e
|
|
|
|
| otherwise = Nothing
|
|
|
|
#endif
|