addurl --fast: Use curl, rather than haskell HTTP library, to support https.
This commit is contained in:
parent
9ba8abffc6
commit
d3d791c7e7
7 changed files with 31 additions and 67 deletions
1
Makefile
1
Makefile
|
@ -6,7 +6,6 @@ BASEFLAGS=-Wall -outputdir $(GIT_ANNEX_TMP_BUILD_DIR) -IUtility
|
|||
# you can turn off some of these features.
|
||||
#
|
||||
# If you're using an old version of yesod, enable -DWITH_OLD_YESOD
|
||||
# Or with an old version of the uri library, enable -DWITH_OLD_URI
|
||||
FEATURES?=$(GIT_ANNEX_LOCAL_FEATURES) -DWITH_ASSISTANT -DWITH_S3 -DWITH_WEBDAV -DWITH_WEBAPP -DWITH_PAIRING -DWITH_XMPP -DWITH_DNS
|
||||
|
||||
bins=git-annex
|
||||
|
|
|
@ -16,8 +16,6 @@ module Utility.Url (
|
|||
) where
|
||||
|
||||
import Common
|
||||
import qualified Network.Browser as Browser
|
||||
import Network.HTTP
|
||||
import Network.URI
|
||||
import Data.Either
|
||||
|
||||
|
@ -38,20 +36,34 @@ check url headers expected_size = handle <$> exists url headers
|
|||
- also returning its size if available. -}
|
||||
exists :: URLString -> Headers -> IO (Bool, Maybe Integer)
|
||||
exists url headers = case parseURI url of
|
||||
Nothing -> return (False, Nothing)
|
||||
Just u
|
||||
| uriScheme u == "file:" -> do
|
||||
s <- catchMaybeIO $ getFileStatus (uriPath u)
|
||||
return $ case s of
|
||||
Nothing -> (False, Nothing)
|
||||
Just stat -> (True, Just $ fromIntegral $ fileSize stat)
|
||||
case s of
|
||||
Just stat -> return (True, Just $ fromIntegral $ fileSize stat)
|
||||
Nothing -> dne
|
||||
| otherwise -> do
|
||||
r <- request u headers HEAD
|
||||
case rspCode r of
|
||||
(2,_,_) -> return (True, size r)
|
||||
_ -> return (False, Nothing)
|
||||
output <- readProcess "curl" curlparams
|
||||
case lastMaybe (lines output) of
|
||||
Just ('2':_:_) -> return (True, extractsize output)
|
||||
_ -> dne
|
||||
Nothing -> dne
|
||||
where
|
||||
size = liftM Prelude.read . lookupHeader HdrContentLength . rspHeaders
|
||||
dne = return (False, Nothing)
|
||||
|
||||
curlparams =
|
||||
[ "-s"
|
||||
, "--head"
|
||||
, "-L"
|
||||
, url
|
||||
, "-w", "%{http_code}"
|
||||
] ++ concatMap (\h -> ["-H", h]) headers
|
||||
|
||||
extractsize s = case lastMaybe $ filter ("Content-Length:" `isPrefixOf`) (lines s) of
|
||||
Just l -> case lastMaybe $ words l of
|
||||
Just sz -> readish sz
|
||||
_ -> Nothing
|
||||
_ -> Nothing
|
||||
|
||||
{- Used to download large files, such as the contents of keys.
|
||||
-
|
||||
|
@ -80,54 +92,5 @@ download url headers options file
|
|||
|
||||
{- Downloads a small file. -}
|
||||
get :: URLString -> Headers -> IO String
|
||||
get url headers =
|
||||
case parseURI url of
|
||||
Nothing -> error "url parse error"
|
||||
Just u -> do
|
||||
r <- request u headers GET
|
||||
case rspCode r of
|
||||
(2,_,_) -> return $ rspBody r
|
||||
_ -> error $ rspReason r
|
||||
|
||||
{- Makes a http request of an url. For example, HEAD can be used to
|
||||
- check if the url exists, or GET used to get the url content (best for
|
||||
- small urls).
|
||||
-
|
||||
- This does its own redirect following because Browser's is buggy for HEAD
|
||||
- requests.
|
||||
-}
|
||||
request :: URI -> Headers -> RequestMethod -> IO (Response String)
|
||||
request url headers requesttype = go 5 url
|
||||
where
|
||||
go :: Int -> URI -> IO (Response String)
|
||||
go 0 _ = error "Too many redirects "
|
||||
go n u = do
|
||||
rsp <- Browser.browse $ do
|
||||
Browser.setErrHandler ignore
|
||||
Browser.setOutHandler ignore
|
||||
Browser.setAllowRedirects False
|
||||
let req = mkRequest requesttype u :: Request_String
|
||||
snd <$> Browser.request (addheaders req)
|
||||
case rspCode rsp of
|
||||
(3,0,x) | x /= 5 -> redir (n - 1) u rsp
|
||||
_ -> return rsp
|
||||
ignore = const noop
|
||||
redir n u rsp = case retrieveHeaders HdrLocation rsp of
|
||||
[] -> return rsp
|
||||
(Header _ newu:_) ->
|
||||
case parseURIReference newu of
|
||||
Nothing -> return rsp
|
||||
Just newURI -> go n newURI_abs
|
||||
where
|
||||
#if defined VERSION_network
|
||||
#if ! MIN_VERSION_network(2,4,0)
|
||||
#define WITH_OLD_URI
|
||||
#endif
|
||||
#endif
|
||||
#ifdef WITH_OLD_URI
|
||||
newURI_abs = fromMaybe newURI (newURI `relativeTo` u)
|
||||
#else
|
||||
newURI_abs = newURI `relativeTo` u
|
||||
#endif
|
||||
addheaders req = setHeaders req (rqHeaders req ++ userheaders)
|
||||
userheaders = rights $ map parseHeader headers
|
||||
get url headers = readProcess "curl" $
|
||||
["-s", "-L", url] ++ concatMap (\h -> ["-H", h]) headers
|
||||
|
|
1
debian/changelog
vendored
1
debian/changelog
vendored
|
@ -3,6 +3,7 @@ git-annex (3.20130125) UNRELEASED; urgency=low
|
|||
* webapp: Now allows restarting any threads that crash.
|
||||
* Adjust debian package to only build-depend on DAV on architectures
|
||||
where it is available.
|
||||
* addurl --fast: Use curl, rather than haskell HTTP library, to support https.
|
||||
|
||||
-- Joey Hess <joeyh@debian.org> Sat, 26 Jan 2013 15:48:40 +1100
|
||||
|
||||
|
|
1
debian/control
vendored
1
debian/control
vendored
|
@ -10,7 +10,6 @@ Build-Depends:
|
|||
libghc-pcre-light-dev,
|
||||
libghc-sha-dev,
|
||||
libghc-dataenc-dev,
|
||||
libghc-http-dev,
|
||||
libghc-utf8-string-dev,
|
||||
libghc-hs3-dev (>= 0.5.6),
|
||||
libghc-dav-dev (>= 0.3) [amd64 i386 kfreebsd-amd64 kfreebsd-i386 sparc],
|
||||
|
|
4
debian/rules
vendored
4
debian/rules
vendored
|
@ -1,9 +1,9 @@
|
|||
#!/usr/bin/make -f
|
||||
|
||||
ifeq (install ok installed,$(shell dpkg-query -W -f '$${Status}' libghc-yesod-dev 2>/dev/null))
|
||||
export FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_HOST -DWITH_OLD_URI -DWITH_PAIRING -DWITH_XMPP -DWITH_WEBAPP -DWITH_OLD_YESOD
|
||||
export FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_HOST -DWITH_PAIRING -DWITH_XMPP -DWITH_WEBAPP -DWITH_OLD_YESOD
|
||||
else
|
||||
export FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_HOST -DWITH_OLD_URI -DWITH_PAIRING -DWITH_XMPP
|
||||
export FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_HOST -DWITH_PAIRING -DWITH_XMPP
|
||||
endif
|
||||
ifeq (install ok installed,$(shell dpkg-query -W -f '$${Status}' libghc-dav-dev 2>/dev/null))
|
||||
export FEATURES:=${FEATURES} -DWITH_WEBDAV
|
||||
|
|
|
@ -13,7 +13,6 @@ quite a lot.
|
|||
* [lifted-base](http://hackage.haskell.org/package/lifted-base)
|
||||
* [TestPack](http://hackage.haskell.org/cgi-bin/hackage-scripts/package/testpack)
|
||||
* [QuickCheck 2](http://hackage.haskell.org/package/QuickCheck)
|
||||
* [HTTP](http://hackage.haskell.org/package/HTTP)
|
||||
* [json](http://hackage.haskell.org/package/json)
|
||||
* [IfElse](http://hackage.haskell.org/package/IfElse)
|
||||
* [bloomfilter](http://hackage.haskell.org/package/bloomfilter)
|
||||
|
|
|
@ -6,3 +6,6 @@ To give an example, here is a PDF file:
|
|||
If you switch the https: to http:, it redirects you back to https:.
|
||||
|
||||
As more sites provide https: for non-secret traffic, this becomes more of an issue.
|
||||
|
||||
> I've gotten rid of the use of the HTTP library; now it just uses curl.
|
||||
> [[done]] --[[Joey]]
|
||||
|
|
Loading…
Reference in a new issue