{- Url downloading.
 -
 - Copyright 2011,2013 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

{-# LANGUAGE CPP #-}

module Utility.Url (
	URLString,
	check,
	exists,
	download,
	downloadQuiet
) where

import Common
import Network.URI
import qualified Network.Browser as Browser
import Network.HTTP
import Data.Either

import qualified Build.SysConfig

type URLString = String

type Headers = [String]
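-- Each entry in Headers is a raw "Name: value" string; it is passed
-- on the command line to curl/wget (-H / --header=) and run through
-- parseHeader for Network.Browser requests.
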
{- Checks that an url exists and could be successfully downloaded,
 - also checking that its size, if available, matches a specified size. -}
check :: URLString -> Headers -> Maybe Integer -> IO Bool
check url headers expected_size = handle <$> exists url headers
  where
	handle (False, _) = False
	handle (True, Nothing) = True
	handle (True, s) = expected_size == s

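{- A usage sketch (the url and size here are made-up examples): this
 - succeeds only when the url is retrievable and any size the server
 - reports matches the expected 1048576 bytes:
 -
 - > ok <- check "http://example.com/big.iso" [] (Just 1048576)
 -}
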
{- Checks that an url exists and could be successfully downloaded,
 - also returning its size if available.
 -
 - For a file: url, check it directly.
 -
 - Uses curl otherwise, when available, since curl handles https better
 - than does Haskell's Network.Browser.
 -}
exists :: URLString -> Headers -> IO (Bool, Maybe Integer)
exists url headers = case parseURIRelaxed url of
	Just u
		| uriScheme u == "file:" -> do
			s <- catchMaybeIO $ getFileStatus (unEscapeString $ uriPath u)
			case s of
				Just stat -> return (True, Just $ fromIntegral $ fileSize stat)
				Nothing -> dne
		| otherwise -> if Build.SysConfig.curl
			then do
				output <- readProcess "curl" curlparams
				case lastMaybe (lines output) of
					Just ('2':_:_) -> return (True, extractsize output)
					_ -> dne
			else do
				r <- request u headers HEAD
				case rspCode r of
					(2,_,_) -> return (True, size r)
					_ -> return (False, Nothing)
	Nothing -> dne
  where
	dne = return (False, Nothing)

	curlparams =
		[ "-s"
		, "--head"
		, "-L"
		, url
		, "-w", "%{http_code}"
		] ++ concatMap (\h -> ["-H", h]) headers

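	-- With these parameters the HEAD check runs, roughly,
	--   curl -s --head -L <url> -w %{http_code} [-H <header> ...]
	-- (a sketch; readProcess passes each argument separately), so the
	-- numeric status code is the last line of the output inspected above.
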
	extractsize s = case lastMaybe $ filter ("Content-Length:" `isPrefixOf`) (lines s) of
		Just l -> case lastMaybe $ words l of
			Just sz -> readish sz
			_ -> Nothing
		_ -> Nothing

	size = liftM Prelude.read . lookupHeader HdrContentLength . rspHeaders

{- Used to download large files, such as the contents of keys.
 -
 - Uses the wget or curl program for its progress bar. (Wget has a better
 - one, so is preferred.) Which program to use is determined at run time;
 - it would not be appropriate to test at configure time and build in
 - support for only one.
 -}
download :: URLString -> Headers -> [CommandParam] -> FilePath -> IO Bool
download = download' False

{- No output, even on error. -}
downloadQuiet :: URLString -> Headers -> [CommandParam] -> FilePath -> IO Bool
downloadQuiet = download' True

download' :: Bool -> URLString -> Headers -> [CommandParam] -> FilePath -> IO Bool
download' quiet url headers options file =
	case parseURIRelaxed url of
		Just u
			| uriScheme u == "file:" -> do
				-- curl does not create destination file
				-- for an empty file:// url, so pre-create
				writeFile file ""
				curl
			| otherwise -> ifM (inPath "wget") (wget , curl)
		_ -> return False
  where
	headerparams = map (\h -> Param $ "--header=" ++ h) headers
	wget = go "wget" $ headerparams ++ quietopt "-q" ++ [Params "-c -O"]
	{- Uses the -# progress display, because the normal
	 - one is very confusing when resuming, showing
	 - the remainder to download as the whole file,
	 - and not indicating how much percent was
	 - downloaded before the resume. -}
	curl = go "curl" $ headerparams ++ quietopt "-s" ++
		[Params "-f -L -C - -# -o"]
	go cmd opts = boolSystem cmd $
		options++opts++[File file, File url]
	quietopt s
		| quiet = [Param s]
		| otherwise = []

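	-- A sketch of the command lines this builds (caller-supplied options
	-- come first; the quiet flag appears only in quiet mode):
	--   wget [options] --header=<h> [-q] -c -O <file> <url>
	--   curl [options] --header=<h> [-s] -f -L -C - -# -o <file> <url>
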
{- Uses Network.Browser to make a http request of an url.
 - For example, HEAD can be used to check if the url exists,
 - or GET used to get the url content (best for small urls).
 -
 - This does its own redirect following because Browser's is buggy for HEAD
 - requests.
 -
 - Unfortunately, does not handle https, so should only be used
 - when curl is not available.
 -}
request :: URI -> Headers -> RequestMethod -> IO (Response String)
request url headers requesttype = go 5 url
  where
	go :: Int -> URI -> IO (Response String)
	go 0 _ = error "Too many redirects"
	go n u = do
		rsp <- Browser.browse $ do
			Browser.setErrHandler ignore
			Browser.setOutHandler ignore
			Browser.setAllowRedirects False
			let req = mkRequest requesttype u :: Request_String
			snd <$> Browser.request (addheaders req)
		case rspCode rsp of
			(3,0,x) | x /= 5 -> redir (n - 1) u rsp
			_ -> return rsp
	addheaders req = setHeaders req (rqHeaders req ++ userheaders)
	userheaders = rights $ map parseHeader headers
	ignore = const noop
	redir n u rsp = case retrieveHeaders HdrLocation rsp of
		[] -> return rsp
		(Header _ newu:_) ->
			case parseURIReference newu of
				Nothing -> return rsp
				Just newURI -> go n $
#if defined VERSION_network
#if ! MIN_VERSION_network(2,4,0)
#define WITH_OLD_URI
#endif
#endif
#ifdef WITH_OLD_URI
					fromMaybe newURI (newURI `relativeTo` u)
#else
					newURI `relativeTo` u
#endif

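{- A usage sketch for the helper above, assuming the url parses and is
 - not https:
 -
 - > case parseURIRelaxed "http://example.com/" of
 - >         Just u -> print . rspCode =<< request u [] HEAD
 - >         Nothing -> putStrLn "unparseable url"
 -}
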
{- Allows for spaces and other stuff in urls, properly escaping them. -}
parseURIRelaxed :: URLString -> Maybe URI
parseURIRelaxed = parseURI . escapeURIString isAllowedInURI
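
-- For example (a sketch), parseURIRelaxed "http://example.com/some file"
-- escapes the space to %20 before handing the string to parseURI, where
-- plain parseURI would reject it.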