add back support for ftp urls

Add back support for ftp urls, which was disabled as part of the fix for
security hole CVE-2018-10857 (except for configurations which enabled curl
and bypassed public IP address restrictions). Now it will work if allowed
by annex.security.allowed-ip-addresses.
This commit is contained in:
Joey Hess 2019-05-30 14:51:34 -04:00
parent 1871295765
commit 67c06f5121
No known key found for this signature in database
GPG key ID: DB12DB0FF05F8F38
7 changed files with 165 additions and 29 deletions

View file

@ -59,7 +59,8 @@ getUrlOptions = Annex.getState Annex.urloptions >>= \case
-- it from accessing specific IP addresses. -- it from accessing specific IP addresses.
curlopts <- map Param . annexWebOptions <$> Annex.getGitConfig curlopts <- map Param . annexWebOptions <$> Annex.getGitConfig
let urldownloader = if null curlopts let urldownloader = if null curlopts
then U.DownloadWithConduit then U.DownloadWithConduit $
U.DownloadWithCurlRestricted mempty
else U.DownloadWithCurl curlopts else U.DownloadWithCurl curlopts
manager <- liftIO $ U.newManager U.managerSettings manager <- liftIO $ U.newManager U.managerSettings
return (urldownloader, manager) return (urldownloader, manager)
@ -78,7 +79,7 @@ getUrlOptions = Annex.getState Annex.urloptions >>= \case
let connectionrestricted = addrConnectionRestricted let connectionrestricted = addrConnectionRestricted
("Configuration of annex.security.allowed-ip-addresses does not allow accessing address " ++) ("Configuration of annex.security.allowed-ip-addresses does not allow accessing address " ++)
let r = Restriction let r = Restriction
{ addressRestriction = \addr -> { checkAddressRestriction = \addr ->
if isallowed (addrAddress addr) if isallowed (addrAddress addr)
then Nothing then Nothing
else Just (connectionrestricted addr) else Just (connectionrestricted addr)
@ -90,7 +91,9 @@ getUrlOptions = Annex.getState Annex.urloptions >>= \case
Just ProxyRestricted -> toplevelWarning True Just ProxyRestricted -> toplevelWarning True
"http proxy settings not used due to annex.security.allowed-ip-addresses configuration" "http proxy settings not used due to annex.security.allowed-ip-addresses configuration"
manager <- liftIO $ U.newManager settings manager <- liftIO $ U.newManager settings
return (U.DownloadWithConduit, manager) let urldownloader = U.DownloadWithConduit $
U.DownloadWithCurlRestricted r
return (urldownloader, manager)
ipAddressesUnlimited :: Annex Bool ipAddressesUnlimited :: Annex Bool
ipAddressesUnlimited = ipAddressesUnlimited =

View file

@ -24,6 +24,10 @@ git-annex (7.20190508) UNRELEASED; urgency=medium
annex.security.allowed-ip-addresses because it is not really specific annex.security.allowed-ip-addresses because it is not really specific
to the http protocol, also limiting eg, git-annex's use of ftp. to the http protocol, also limiting eg, git-annex's use of ftp.
The old name for the config will still work. The old name for the config will still work.
* Add back support for ftp urls, which was disabled as part of the fix for
security hole CVE-2018-10857 (except for configurations which enabled curl
and bypassed public IP address restrictions). Now it will work
if allowed by annex.security.allowed-ip-addresses.
-- Joey Hess <id@joeyh.name> Mon, 06 May 2019 13:52:02 -0400 -- Joey Hess <id@joeyh.name> Mon, 06 May 2019 13:52:02 -0400

View file

@ -30,11 +30,39 @@ import qualified Data.ByteString.UTF8 as BU
import Data.Default import Data.Default
import Data.Typeable import Data.Typeable
import Control.Applicative import Control.Applicative
#if MIN_VERSION_base(4,9,0)
import qualified Data.Semigroup as Sem
#endif
import Data.Monoid
import Prelude
data Restriction = Restriction data Restriction = Restriction
{ addressRestriction :: AddrInfo -> Maybe ConnectionRestricted { checkAddressRestriction :: AddrInfo -> Maybe ConnectionRestricted
} }
{- Combines two Restrictions. An address is rejected when either of
 - the two rejects it; when both do, the first Restriction's
 - ConnectionRestricted is the one reported. -}
appendRestrictions :: Restriction -> Restriction -> Restriction
appendRestrictions r1 r2 = Restriction { checkAddressRestriction = check }
  where
	check addr = checkAddressRestriction r1 addr
		<|> checkAddressRestriction r2 addr
-- | mempty does not restrict HTTP connections in any way
instance Monoid Restriction where
mempty = Restriction
{ checkAddressRestriction = \_ -> Nothing
}
-- With base >= 4.11, mappend defaults to the Semigroup (<>), so no
-- explicit definition is needed in that branch; base 4.9/4.10 need it
-- delegated to the Semigroup instance, and older bases have no
-- Semigroup class at all, so mappend is defined directly.
#if MIN_VERSION_base(4,11,0)
#elif MIN_VERSION_base(4,9,0)
mappend = (Sem.<>)
#else
mappend = appendRestrictions
#endif
-- | Semigroup only exists in base >= 4.9. Combining two Restrictions
-- blocks an address when either operand blocks it (see appendRestrictions).
#if MIN_VERSION_base(4,9,0)
instance Sem.Semigroup Restriction where
(<>) = appendRestrictions
#endif
-- | An exception used to indicate that the connection was restricted. -- | An exception used to indicate that the connection was restricted.
data ConnectionRestricted = ConnectionRestricted String data ConnectionRestricted = ConnectionRestricted String
deriving (Show, Typeable) deriving (Show, Typeable)
@ -117,7 +145,7 @@ restrictProxy cfg base = do
return $ proxy $ f $ dummyreq https return $ proxy $ f $ dummyreq https
mkproxy Nothing = (noProxy, Nothing) mkproxy Nothing = (noProxy, Nothing)
mkproxy (Just proxyaddr) = case addressRestriction cfg proxyaddr of mkproxy (Just proxyaddr) = case checkAddressRestriction cfg proxyaddr of
Nothing -> (addrtoproxy (addrAddress proxyaddr), Nothing) Nothing -> (addrtoproxy (addrAddress proxyaddr), Nothing)
Just _ -> (noProxy, Just ProxyRestricted) Just _ -> (noProxy, Just ProxyRestricted)
@ -200,7 +228,7 @@ getConnection cfg tls = do
close close
(\sock -> NC.connectFromSocket context sock connparams) (\sock -> NC.connectFromSocket context sock connparams)
where where
tryToConnect addr = case addressRestriction cfg addr of tryToConnect addr = case checkAddressRestriction cfg addr of
Nothing -> bracketOnError Nothing -> bracketOnError
(socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)) (socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr))
close close

View file

@ -12,9 +12,22 @@ import Utility.Exception
import Network.Socket import Network.Socket
import Data.Word import Data.Word
import Data.Memory.Endian import Data.Memory.Endian
import Data.List
import Control.Applicative import Control.Applicative
import Text.Printf
import Prelude import Prelude
{- Formats a socket address's IP address as a string: dotted decimal
 - for IPv4, colon-separated (uncompressed) hex groups for IPv6.
 - Non-IP socket addresses (eg unix sockets) yield Nothing. -}
extractIPAddress :: SockAddr -> Maybe String
extractIPAddress (SockAddrInet _ ipv4) =
	let (o1,o2,o3,o4) = hostAddressToTuple ipv4
	in Just $ intercalate "." (map show [o1,o2,o3,o4])
extractIPAddress (SockAddrInet6 _ _ ipv6 _) =
	let (g1,g2,g3,g4,g5,g6,g7,g8) = hostAddress6ToTuple ipv6
	in Just $ intercalate ":"
		(map (printf "%x" :: Word16 -> String) [g1,g2,g3,g4,g5,g6,g7,g8])
extractIPAddress _ = Nothing
{- Check if an IP address is a loopback address; connecting to it {- Check if an IP address is a loopback address; connecting to it
- may connect back to the local host. -} - may connect back to the local host. -}
isLoopbackAddress :: SockAddr -> Bool isLoopbackAddress :: SockAddr -> Bool

View file

@ -1,6 +1,6 @@
{- Url downloading. {- Url downloading.
- -
- Copyright 2011-2018 Joey Hess <id@joeyh.name> - Copyright 2011-2019 Joey Hess <id@joeyh.name>
- -
- License: BSD-2-clause - License: BSD-2-clause
-} -}
@ -19,6 +19,7 @@ module Utility.Url (
mkScheme, mkScheme,
allowedScheme, allowedScheme,
UrlDownloader(..), UrlDownloader(..),
NonHttpUrlDownloader(..),
UrlOptions(..), UrlOptions(..),
defUrlOptions, defUrlOptions,
mkUrlOptions, mkUrlOptions,
@ -40,18 +41,25 @@ module Utility.Url (
import Common import Common
import Utility.Metered import Utility.Metered
import Utility.HttpManagerRestricted import Utility.HttpManagerRestricted
import Utility.IPAddress
import Network.URI import Network.URI
import Network.HTTP.Types import Network.HTTP.Types
import qualified Network.Connection as NC
import qualified Data.CaseInsensitive as CI import qualified Data.CaseInsensitive as CI
import qualified Data.ByteString as B import qualified Data.ByteString as B
import qualified Data.ByteString.UTF8 as B8 import qualified Data.ByteString.UTF8 as B8
import qualified Data.ByteString.Lazy as L import qualified Data.ByteString.Lazy as L
import qualified Data.Set as S import qualified Data.Set as S
import Control.Exception (throwIO)
import Control.Monad.Trans.Resource import Control.Monad.Trans.Resource
import Network.HTTP.Conduit import Network.HTTP.Conduit
import Network.HTTP.Client import Network.HTTP.Client
import Network.Socket
import Network.BSD (getProtocolNumber)
import Data.Either
import Data.Conduit import Data.Conduit
import Text.Read
import System.Log.Logger import System.Log.Logger
#if ! MIN_VERSION_http_client(0,5,0) #if ! MIN_VERSION_http_client(0,5,0)
@ -92,14 +100,17 @@ data UrlOptions = UrlOptions
} }
data UrlDownloader data UrlDownloader
= DownloadWithConduit = DownloadWithConduit NonHttpUrlDownloader
| DownloadWithCurl [CommandParam] | DownloadWithCurl [CommandParam]
data NonHttpUrlDownloader
= DownloadWithCurlRestricted Restriction
defUrlOptions :: IO UrlOptions defUrlOptions :: IO UrlOptions
defUrlOptions = UrlOptions defUrlOptions = UrlOptions
<$> pure Nothing <$> pure Nothing
<*> pure [] <*> pure []
<*> pure DownloadWithConduit <*> pure (DownloadWithConduit (DownloadWithCurlRestricted mempty))
<*> pure id <*> pure id
<*> newManager managerSettings <*> newManager managerSettings
<*> pure (S.fromList $ map mkScheme ["http", "https", "ftp"]) <*> pure (S.fromList $ map mkScheme ["http", "https", "ftp"])
@ -132,7 +143,7 @@ curlParams uo ps = ps ++ uaparams ++ headerparams ++ addedparams ++ schemeparams
Just ua -> [Param "--user-agent", Param ua] Just ua -> [Param "--user-agent", Param ua]
headerparams = concatMap (\h -> [Param "-H", Param h]) (reqHeaders uo) headerparams = concatMap (\h -> [Param "-H", Param h]) (reqHeaders uo)
addedparams = case urlDownloader uo of addedparams = case urlDownloader uo of
DownloadWithConduit -> [] DownloadWithConduit _ -> []
DownloadWithCurl l -> l DownloadWithCurl l -> l
schemeparams = schemeparams =
[ Param "--proto" [ Param "--proto"
@ -197,25 +208,28 @@ getUrlInfo :: URLString -> UrlOptions -> IO UrlInfo
getUrlInfo url uo = case parseURIRelaxed url of getUrlInfo url uo = case parseURIRelaxed url of
Just u -> checkPolicy uo u dne warnError $ Just u -> checkPolicy uo u dne warnError $
case (urlDownloader uo, parseUrlRequest (show u)) of case (urlDownloader uo, parseUrlRequest (show u)) of
(DownloadWithConduit, Just req) -> (DownloadWithConduit _, Just req) ->
existsconduit req existsconduit req
`catchNonAsync` (const $ return dne) `catchNonAsync` (const $ return dne)
(DownloadWithConduit, Nothing) (DownloadWithConduit (DownloadWithCurlRestricted r), Nothing)
| isfileurl u -> existsfile u | isfileurl u -> existsfile u
| isftpurl u -> existscurlrestricted r u 21
`catchNonAsync` (const $ return dne)
| otherwise -> do | otherwise -> do
unsupportedUrlScheme u warnError unsupportedUrlScheme u warnError
return dne return dne
(DownloadWithCurl _, _) (DownloadWithCurl _, _)
| isfileurl u -> existsfile u | isfileurl u -> existsfile u
| otherwise -> existscurl u | otherwise -> existscurl u basecurlparams
Nothing -> return dne Nothing -> return dne
where where
dne = UrlInfo False Nothing Nothing dne = UrlInfo False Nothing Nothing
found sz f = return $ UrlInfo True sz f found sz f = return $ UrlInfo True sz f
isfileurl u = uriScheme u == "file:" isfileurl u = uriScheme u == "file:"
isftpurl u = uriScheme u == "ftp:"
curlparams = curlParams uo $ basecurlparams = curlParams uo $
[ Param "-s" [ Param "-s"
, Param "--head" , Param "--head"
, Param "-L", Param url , Param "-L", Param url
@ -247,7 +261,7 @@ getUrlInfo url uo = case parseURIRelaxed url of
(extractfilename resp) (extractfilename resp)
else return dne else return dne
existscurl u = do existscurl u curlparams = do
output <- catchDefaultIO "" $ output <- catchDefaultIO "" $
readProcess "curl" $ toCommand curlparams readProcess "curl" $ toCommand curlparams
let len = extractlencurl output let len = extractlencurl output
@ -264,6 +278,9 @@ getUrlInfo url uo = case parseURIRelaxed url of
_ | isftp && isJust len -> good _ | isftp && isJust len -> good
_ -> return dne _ -> return dne
existscurlrestricted r u defport = existscurl u
=<< curlRestrictedParams r u defport basecurlparams
existsfile u = do existsfile u = do
let f = unEscapeString (uriPath u) let f = unEscapeString (uriPath u)
s <- catchMaybeIO $ getFileStatus f s <- catchMaybeIO $ getFileStatus f
@ -310,19 +327,22 @@ download' noerror meterupdate url file uo =
go = case parseURIRelaxed url of go = case parseURIRelaxed url of
Just u -> checkPolicy uo u False dlfailed $ Just u -> checkPolicy uo u False dlfailed $
case (urlDownloader uo, parseUrlRequest (show u)) of case (urlDownloader uo, parseUrlRequest (show u)) of
(DownloadWithConduit, Just req) -> (DownloadWithConduit _, Just req) ->
downloadconduit req downloadconduit req
(DownloadWithConduit, Nothing) (DownloadWithConduit (DownloadWithCurlRestricted r), Nothing)
| isfileurl u -> downloadfile u | isfileurl u -> downloadfile u
| isftpurl u -> downloadcurlrestricted r u 21
`catchNonAsync` (dlfailed . show)
| otherwise -> unsupportedUrlScheme u dlfailed | otherwise -> unsupportedUrlScheme u dlfailed
(DownloadWithCurl _, _) (DownloadWithCurl _, _)
| isfileurl u -> downloadfile u | isfileurl u -> downloadfile u
| otherwise -> downloadcurl | otherwise -> downloadcurl basecurlparams
Nothing -> do Nothing -> do
liftIO $ debugM "url" url liftIO $ debugM "url" url
dlfailed "invalid url" dlfailed "invalid url"
isfileurl u = uriScheme u == "file:" isfileurl u = uriScheme u == "file:"
isftpurl u = uriScheme u == "ftp:"
downloadconduit req = catchMaybeIO (getFileSize file) >>= \case downloadconduit req = catchMaybeIO (getFileSize file) >>= \case
Nothing -> runResourceT $ do Nothing -> runResourceT $ do
@ -405,20 +425,24 @@ download' noerror meterupdate url file uo =
sinkResponseFile meterupdate initialp file mode resp sinkResponseFile meterupdate initialp file mode resp
return True return True
downloadcurl = do basecurlparams = curlParams uo
[ if noerror
then Param "-S"
else Param "-sS"
, Param "-f"
, Param "-L"
, Param "-C", Param "-"
]
downloadcurl curlparams = do
-- curl does not create destination file -- curl does not create destination file
-- if the url happens to be empty, so pre-create. -- if the url happens to be empty, so pre-create.
unlessM (doesFileExist file) $ unlessM (doesFileExist file) $
writeFile file "" writeFile file ""
let ps = curlParams uo boolSystem "curl" (curlparams ++ [Param "-o", File file, File url])
[ if noerror
then Param "-S" downloadcurlrestricted r u defport =
else Param "-sS" downloadcurl =<< curlRestrictedParams r u defport basecurlparams
, Param "-f"
, Param "-L"
, Param "-C", Param "-"
]
boolSystem "curl" (ps ++ [Param "-o", File file, File url])
downloadfile u = do downloadfile u = do
let src = unEscapeString (uriPath u) let src = unEscapeString (uriPath u)
@ -554,3 +578,57 @@ matchHttpExceptionContent want e
| want e = Just e | want e = Just e
| otherwise = Nothing | otherwise = Nothing
#endif #endif
{- Constructs parameters that prevent curl from accessing any IP addresses
- blocked by the Restriction. These are added to the input parameters,
- which should tell curl what to do.
-
- This has to disable redirects because it looks up the IP addresses
- of the host and after limiting to those allowed by the Restriction,
- makes curl resolve the host to those IP addresses. It doesn't make sense
- to use this for http anyway, only for ftp or perhaps other protocols
- supported by curl.
-
- Throws an exception if the Restriction blocks all addresses, or
- if the dns lookup fails. A malformed url will also cause an exception.
-}
curlRestrictedParams :: Restriction -> URI -> Int -> [CommandParam] -> IO [CommandParam]
curlRestrictedParams r u defport ps = case uriAuthority u of
Nothing -> giveup "malformed url"
Just uath -> case uriPort uath of
"" -> go (uriRegName uath) defport
-- strict parser because the port we provide to curl
-- needs to match the port in the url
(':':s) -> case readMaybe s :: Maybe Int of
Just p -> go (uriRegName uath) p
Nothing -> giveup "malformed url"
_ -> giveup "malformed url"
where
-- Resolves the hostname, filters out addresses the Restriction
-- blocks, and prepends --resolve parameters pinning curl to the
-- addresses that remain.
go hostname p = do
proto <- getProtocolNumber "tcp"
let serv = show p
let hints = defaultHints
{ addrFlags = [AI_ADDRCONFIG]
, addrProtocol = proto
, addrSocketType = Stream
}
addrs <- getAddrInfo (Just hints) (Just hostname) (Just serv)
case partitionEithers (map checkrestriction addrs) of
-- Every resolved address was blocked: surface the first
-- ConnectionRestricted as the failure.
((e:_es), []) -> throwIO e
(_, as)
| null as -> throwIO $
NC.HostNotResolved hostname
| otherwise -> return $
(limitresolve p) as ++ ps
-- Left = blocked by the Restriction, Right = allowed.
checkrestriction addr = maybe (Right addr) Left $
checkAddressRestriction r addr
limitresolve p addrs =
[ Param "--resolve"
, Param $ "*:" ++ show p ++ ":" ++ intercalate ":"
(mapMaybe (bracketaddr <$$> extractIPAddress . addrAddress) addrs)
-- Don't let a ftp server provide an IP address.
, Param "--ftp-skip-pasv-ip"
-- Prevent all http redirects.
, Param "--max-redirs", Param "0"
]
-- NOTE(review): brackets are applied to IPv4 addresses as well as
-- IPv6 in the --resolve list; curl appears to accept both bracketed
-- forms — confirm against the curl man page.
bracketaddr a = "[" ++ a ++ "]"

View file

@ -50,3 +50,5 @@ ALL.chr2.phase3_sha 100%[===================>] 1.22G 6.99MB/s in 3m 11s
Would you know why this is happening? And how I can avoid this failure mode in the more up-to-date version of git-annex? Would you know why this is happening? And how I can avoid this failure mode in the more up-to-date version of git-annex?
> Added back support for ftp urls with no special configuration needed.
> [[done]] --[[Joey]]

View file

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="joey"
subject="""comment 6"""
date="2019-05-30T18:50:18Z"
content="""
I've gotten secure ftp url downloading working now. Have not yet put back
in the handling of http-to-ftp redirects.
"""]]