2011-08-20 20:11:42 +00:00
|
|
|
{- Url downloading.
|
2011-08-17 00:49:04 +00:00
|
|
|
-
|
2013-04-16 19:20:21 +00:00
|
|
|
- Copyright 2011,2013 Joey Hess <joey@kitenet.net>
|
2011-08-17 00:49:04 +00:00
|
|
|
-
|
|
|
|
- Licensed under the GNU GPL version 3 or higher.
|
|
|
|
-}
|
|
|
|
|
2012-10-10 15:26:30 +00:00
|
|
|
{-# LANGUAGE CPP #-}
|
|
|
|
|
2011-08-20 20:11:42 +00:00
|
|
|
module Utility.Url (
|
2012-01-02 18:20:20 +00:00
|
|
|
URLString,
|
2013-09-28 18:35:21 +00:00
|
|
|
UserAgent,
|
2014-02-25 02:00:25 +00:00
|
|
|
UrlOptions(..),
|
2012-02-10 23:17:41 +00:00
|
|
|
check,
|
2013-10-11 17:05:00 +00:00
|
|
|
checkBoth,
|
2011-08-17 00:49:04 +00:00
|
|
|
exists,
|
|
|
|
download,
|
2013-11-25 03:44:30 +00:00
|
|
|
downloadQuiet,
|
|
|
|
parseURIRelaxed
|
2011-08-17 00:49:04 +00:00
|
|
|
) where
|
|
|
|
|
2012-03-16 00:39:25 +00:00
|
|
|
import Common
|
2011-08-17 00:49:04 +00:00
|
|
|
import Network.URI
|
2013-04-16 19:20:21 +00:00
|
|
|
import qualified Network.Browser as Browser
|
|
|
|
import Network.HTTP
|
|
|
|
import Data.Either
|
2014-02-25 02:00:25 +00:00
|
|
|
import Data.Default
|
2013-04-16 19:20:21 +00:00
|
|
|
|
|
|
|
import qualified Build.SysConfig
|
2011-08-17 00:49:04 +00:00
|
|
|
|
|
|
|
-- | A textual url, not yet parsed into a 'URI'.
type URLString = String
|
|
|
|
|
2012-04-22 05:13:09 +00:00
|
|
|
-- | Raw HTTP headers of the form "Name: value"; passed on the
-- command line to curl/wget and parsed for Network.Browser requests.
type Headers = [String]
|
|
|
|
|
2013-09-28 18:35:21 +00:00
|
|
|
-- | User-agent string to send with requests.
type UserAgent = String
|
|
|
|
|
2014-02-25 02:00:25 +00:00
|
|
|
-- | Options controlling how urls are accessed.
data UrlOptions = UrlOptions
	{ userAgent :: Maybe UserAgent
	-- ^ User-agent to send; Nothing leaves the downloader's default.
	, reqHeaders :: Headers
	-- ^ Extra HTTP headers ("Name: value") to send with each request.
	, reqParams :: [CommandParam]
	-- ^ Extra command-line parameters passed to curl/wget.
	}
|
|
|
|
|
|
|
|
-- | By default: no user agent override, no extra headers, no extra
-- command-line parameters.
instance Default UrlOptions
  where
	def = UrlOptions
		{ userAgent = Nothing
		, reqHeaders = []
		, reqParams = []
		}
|
|
|
|
|
2012-02-10 23:17:41 +00:00
|
|
|
{- Checks that an url exists and could be successfully downloaded,
 - also checking that its size, if available, matches a specified size. -}
checkBoth :: URLString -> Maybe Integer -> UrlOptions -> IO Bool
checkBoth url expected_size uo = do
	(found, sizeok) <- check url expected_size uo
	return (found && sizeok)
|
2014-02-25 02:00:25 +00:00
|
|
|
{- Checks whether an url exists, and whether its size, when known,
 - matches the expected size. An unknown size always counts as a match. -}
check :: URLString -> Maybe Integer -> UrlOptions -> IO (Bool, Bool)
check url expected_size = inspect <$$> exists url
  where
	inspect (False, _) = (False, False)
	inspect (True, Nothing) = (True, True)
	inspect (True, Just sz) = (True, maybe True (== sz) expected_size)
|
2012-02-10 23:17:41 +00:00
|
|
|
|
|
|
|
{- Checks that an url exists and could be successfully downloaded,
 - also returning its size if available.
 -
 - For a file: url, check it directly.
 -
 - Uses curl otherwise, when available, since curl handles https better
 - than does Haskell's Network.Browser.
 -}
exists :: URLString -> UrlOptions -> IO (Bool, Maybe Integer)
exists url uo = case parseURIRelaxed url of
	Just u
		-- A file: url is checked by statting the local file.
		| uriScheme u == "file:" -> do
			s <- catchMaybeIO $ getFileStatus (unEscapeString $ uriPath u)
			case s of
				Just stat -> return (True, Just $ fromIntegral $ fileSize stat)
				Nothing -> dne
		| otherwise -> if Build.SysConfig.curl
			then do
				output <- readProcess "curl" $ toCommand curlparams
				-- The last line of output is the http status
				-- code, thanks to -w; any 2xx code is success.
				case lastMaybe (lines output) of
					Just ('2':_:_) -> return (True, extractsize output)
					_ -> dne
			else do
				r <- request u HEAD uo
				case rspCode r of
					(2,_,_) -> return (True, size r)
					_ -> return (False, Nothing)
	Nothing -> dne
  where
	dne = return (False, Nothing)
	-- -s silences curl; --head avoids downloading content; -L follows
	-- redirects; -w appends the final http status code to the output.
	curlparams = addUserAgent uo $
		[ Param "-s"
		, Param "--head"
		, Param "-L", Param url
		, Param "-w", Param "%{http_code}"
		] ++ concatMap (\h -> [Param "-H", Param h]) (reqHeaders uo) ++ (reqParams uo)
	-- Finds the Content-Length of the final response in curl's
	-- header output (the last one, after any redirects).
	extractsize s = case lastMaybe $ filter ("Content-Length:" `isPrefixOf`) (lines s) of
		Just l -> case lastMaybe $ words l of
			Just sz -> readish sz
			_ -> Nothing
		_ -> Nothing
	-- Uses total readish rather than partial Prelude.read, so a
	-- malformed Content-Length header yields Nothing instead of
	-- crashing when the value is forced.
	size = (readish =<<) . lookupHeader HdrContentLength . rspHeaders
|
|
|
|
|
2013-09-28 18:35:21 +00:00
|
|
|
-- Appends a --user-agent option when one is configured.
-- The option spelling works for both wget and curl commands.
addUserAgent :: UrlOptions -> [CommandParam] -> [CommandParam]
addUserAgent uo ps = maybe ps withagent (userAgent uo)
  where
	withagent ua = ps ++ [Param "--user-agent", Param ua]
|
2013-09-28 18:35:21 +00:00
|
|
|
|
2011-08-17 00:49:04 +00:00
|
|
|
{- Used to download large files, such as the contents of keys.
 -
 - Uses wget or curl program for its progress bar. (Wget has a better one,
 - so is preferred.) Which program to use is determined at run time; it
 - would not be appropriate to test at configure time and build support
 - for only one in.
 -
 - Progress and error output is shown (not quiet). -}
download :: URLString -> FilePath -> UrlOptions -> IO Bool
download = download' False
|
|
|
|
|
|
|
|
{- No output, even on error. Passes the downloader's quiet flag
 - (wget -q / curl -s). -}
downloadQuiet :: URLString -> FilePath -> UrlOptions -> IO Bool
downloadQuiet = download' True
|
|
|
|
|
2014-02-25 02:00:25 +00:00
|
|
|
{- Shared implementation of download and downloadQuiet.
 -
 - Prefers wget when it is in the path, falling back to curl;
 - file: urls are always handled with curl. The Bool enables
 - quiet mode, suppressing progress output. -}
download' :: Bool -> URLString -> FilePath -> UrlOptions -> IO Bool
download' quiet url file uo =
	case parseURIRelaxed url of
		Just u
			| uriScheme u == "file:" -> do
				-- curl does not create destination file
				-- for an empty file:// url, so pre-create
				writeFile file ""
				curl
			| otherwise -> ifM (inPath "wget") (wget , curl)
		-- Unparseable url: fail without running anything.
		_ -> return False
  where
	headerparams = map (\h -> Param $ "--header=" ++ h) (reqHeaders uo)
	wget = go "wget" $ headerparams ++ quietopt "-q" ++ wgetparams
	{- Regular wget needs --clobber to continue downloading an existing
	 - file. On Android, busybox wget is used, which does not
	 - support, or need that option. -}
#ifndef __ANDROID__
	wgetparams = [Params "--clobber -c -O"]
#else
	wgetparams = [Params "-c -O"]
#endif
	{- Uses the -# progress display, because the normal
	 - one is very confusing when resuming, showing
	 - the remainder to download as the whole file,
	 - and not indicating how much percent was
	 - downloaded before the resume. -}
	curl = go "curl" $ headerparams ++ quietopt "-s" ++
		[Params "-f -L -C - -# -o"]
	-- Runs the downloader with user params, the chosen options,
	-- and finally the destination file and url.
	go cmd opts = boolSystem cmd $
		addUserAgent uo $ reqParams uo++opts++[File file, File url]
	-- Emits the given quiet flag only in quiet mode.
	quietopt s
		| quiet = [Param s]
		| otherwise = []
|
2013-04-16 19:20:21 +00:00
|
|
|
|
|
|
|
{- Uses Network.Browser to make a http request of an url.
 - For example, HEAD can be used to check if the url exists,
 - or GET used to get the url content (best for small urls).
 -
 - This does its own redirect following because Browser's is buggy for HEAD
 - requests.
 -
 - Unfortunately, does not handle https, so should only be used
 - when curl is not available.
 -}
request :: URI -> RequestMethod -> UrlOptions -> IO (Response String)
request url requesttype uo = go 5 url
  where
	-- Follows at most 5 redirects, counting down.
	go :: Int -> URI -> IO (Response String)
	go 0 _ = error "Too many redirects "
	go n u = do
		rsp <- Browser.browse $ do
			maybe noop Browser.setUserAgent (userAgent uo)
			Browser.setErrHandler ignore
			Browser.setOutHandler ignore
			-- Redirects are followed manually below (see the
			-- module comment about buggy HEAD handling).
			Browser.setAllowRedirects False
			let req = mkRequest requesttype u :: Request_String
			snd <$> Browser.request (addheaders req)
		case rspCode rsp of
			-- 30x responses (except 305 Use Proxy) are redirects.
			(3,0,x) | x /= 5 -> redir (n - 1) u rsp
			_ -> return rsp
	addheaders req = setHeaders req (rqHeaders req ++ userheaders)
	-- Headers that fail to parse are silently dropped.
	userheaders = rights $ map parseHeader (reqHeaders uo)
	ignore = const noop
	-- Follows the Location header of a redirect response,
	-- resolving it relative to the current url.
	redir n u rsp = case retrieveHeaders HdrLocation rsp of
		[] -> return rsp
		(Header _ newu:_) ->
			case parseURIReference newu of
				Nothing -> return rsp
				Just newURI -> go n $
{- relativeTo returned a Maybe in network versions before 2.4. -}
#if defined VERSION_network
#if ! MIN_VERSION_network(2,4,0)
#define WITH_OLD_URI
#endif
#endif
#ifdef WITH_OLD_URI
					fromMaybe newURI (newURI `relativeTo` u)
#else
					newURI `relativeTo` u
#endif
|
2013-03-11 03:00:33 +00:00
|
|
|
|
|
|
|
{- Parses an url, tolerating spaces and other characters that are not
 - normally allowed in urls, by escaping them first. -}
parseURIRelaxed :: URLString -> Maybe URI
parseURIRelaxed s = parseURI (escapeURIString isAllowedInURI s)
|