git-annex/Utility/Url.hs

{- Url downloading.
 -
 - Copyright 2011 Joey Hess <joey@kitenet.net>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

module Utility.Url (
	URLString,
	check,
	exists,
	download,
	get
) where

import Common
import qualified Network.Browser as Browser
import Network.HTTP
import Network.URI
import Data.Either

type URLString = String

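{- Headers are given as raw "Name: value" strings, the same form they
 - take in an http request. -}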
type Headers = [String]

{- Checks that an url exists and could be successfully downloaded,
 - also checking that its size, if available, matches a specified size. -}
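{- For example (illustrative), check "http://example.com/f" [] (Just 100)
 - is True only if the url exists and any size it reports is 100 bytes. -}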
check :: URLString -> Headers -> Maybe Integer -> IO Bool
check url headers expected_size = handle <$> exists url headers
	where
		handle (False, _) = False
		handle (True, Nothing) = True
		handle (True, s) = expected_size == s

{- Checks that an url exists and could be successfully downloaded,
 - also returning its size if available. -}
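{- For example (illustrative), exists "http://example.com/f" [] might
 - return (True, Just 100), or (True, Nothing) when no Content-Length
 - header is reported. -}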
exists :: URLString -> Headers -> IO (Bool, Maybe Integer)
exists url headers =
	case parseURI url of
		Nothing -> return (False, Nothing)
		Just u -> do
			r <- request u headers HEAD
			case rspCode r of
				(2,_,_) -> return (True, size r)
				_ -> return (False, Nothing)
	where
		size = liftM Prelude.read . lookupHeader HdrContentLength . rspHeaders

{- Used to download large files, such as the contents of keys.
 -
 - Uses the wget or curl program for its progress bar. (Wget has a better
 - one, so is preferred.) Which program to use is determined at run time;
 - it would not be appropriate to test at configure time and build in
 - support for only one.
 -}
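{- Roughly, this runs either
 -   wget --header=... -c -O file url
 - or
 -   curl --header=... -L -C - -# -o file url
 - plus any extra options the caller passes in (an illustrative sketch;
 - the exact command lines are built below). -}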
download :: URLString -> Headers -> [CommandParam] -> FilePath -> IO Bool
download url headers options file = ifM (inPath "wget") (wget , curl)
	where
		headerparams = map (\h -> Param $ "--header=" ++ h) headers
		wget = go "wget" $ headerparams ++ [Params "-c -O"]
		{- Uses the -# progress display, because the normal
		 - one is very confusing when resuming, showing
		 - the remainder to download as the whole file,
		 - and not indicating how much percent was
		 - downloaded before the resume. -}
		curl = go "curl" $ headerparams ++ [Params "-L -C - -# -o"]
		go cmd opts = boolSystem cmd $
			options++opts++[File file, File url]

{- Downloads a small file. -}
get :: URLString -> Headers -> IO String
get url headers =
	case parseURI url of
		Nothing -> error "url parse error"
		Just u -> do
			r <- request u headers GET
			case rspCode r of
				(2,_,_) -> return $ rspBody r
				_ -> error $ rspReason r

{- Makes a http request of an url. For example, HEAD can be used to
 - check if the url exists, or GET used to get the url content (best for
 - small urls).
 -
 - This does its own redirect following because Browser's is buggy for HEAD
 - requests.
 -}
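{- The user-supplied headers are added to each request made, and at most
 - five redirects are followed before giving up. -}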
request :: URI -> Headers -> RequestMethod -> IO (Response String)
request url headers requesttype = go 5 url
	where
		go :: Int -> URI -> IO (Response String)
		go 0 _ = error "Too many redirects"
		go n u = do
			rsp <- Browser.browse $ do
				Browser.setErrHandler ignore
				Browser.setOutHandler ignore
				Browser.setAllowRedirects False
				let req = mkRequest requesttype u :: Request_String
				snd <$> Browser.request (addheaders req)
			case rspCode rsp of
				{- Any 30x response except 305 (Use Proxy) is
				 - treated as a redirect. -}
				(3,0,x) | x /= 5 -> redir (n - 1) u rsp
				_ -> return rsp
		{- Silences the Browser module's logging. -}
		ignore = const noop
		{- Follows the Location header of a redirect response,
		 - resolving a relative url against the requested url. -}
		redir n u rsp = case retrieveHeaders HdrLocation rsp of
			[] -> return rsp
			(Header _ newu:_) ->
				case parseURIReference newu of
					Nothing -> return rsp
					Just newURI -> go n newURI_abs
						where
							newURI_abs = fromMaybe newURI (newURI `relativeTo` u)
		{- Adds the user-supplied headers to a request; headers
		 - that fail to parse are ignored. -}
		addheaders req = setHeaders req (rqHeaders req ++ userheaders)
		userheaders = rights $ map parseHeader headers