S3: Fix encoding when generating public urls of S3 objects.

This code feels worryingly stringly typed, but using URI does not help
because the uriPath still has to be constructed with the right
uri-encoding.
This commit is contained in:
Joey Hess 2019-08-15 12:55:48 -04:00
parent 83fc72653b
commit 708fc6567f
No known key found for this signature in database
GPG key ID: DB12DB0FF05F8F38
3 changed files with 17 additions and 6 deletions

View file

@ -27,6 +27,7 @@ git-annex (7.20190731) UNRELEASED; urgency=medium
    and on Windows. If your repository was set up by an old version
    of git-annex that omitted the hooks, you can simply re-run git-annex init
    to install them.
* S3: Fix encoding when generating public urls of S3 objects.
 -- Joey Hess <id@joeyh.name>  Thu, 01 Aug 2019 00:11:56 -0400

View file

@ -29,6 +29,7 @@ import Network.Socket (HostName)
import Network.HTTP.Conduit (Manager) import Network.HTTP.Conduit (Manager)
import Network.HTTP.Client (responseStatus, responseBody, RequestBody(..)) import Network.HTTP.Client (responseStatus, responseBody, RequestBody(..))
import Network.HTTP.Types import Network.HTTP.Types
import Network.URI
import Control.Monad.Trans.Resource import Control.Monad.Trans.Resource
import Control.Monad.Catch import Control.Monad.Catch
import Data.IORef import Data.IORef
@ -387,15 +388,15 @@ retrieveExportS3 hv r info _k loc f p =
where where
go = withS3Handle hv $ \case go = withS3Handle hv $ \case
Just h -> do Just h -> do
retrieveHelper info h (Left (T.pack exporturl)) f p retrieveHelper info h (Left (T.pack exportloc)) f p
return True return True
Nothing -> case getPublicUrlMaker info of Nothing -> case getPublicUrlMaker info of
Nothing -> do Nothing -> do
warning $ needS3Creds (uuid r) warning $ needS3Creds (uuid r)
return False return False
Just geturl -> Url.withUrlOptions $ Just geturl -> Url.withUrlOptions $
liftIO . Url.download p (geturl exporturl) f liftIO . Url.download p (geturl exportloc) f
exporturl = bucketExportLocation info loc exportloc = bucketExportLocation info loc
removeExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> Annex Bool removeExportS3 :: S3HandleVar -> Remote -> S3Info -> Key -> ExportLocation -> Annex Bool
removeExportS3 hv r info k loc = withS3Handle hv $ \case removeExportS3 hv r info k loc = withS3Handle hv $ \case
@ -928,7 +929,14 @@ awsPublicUrl info = genericPublicUrl $
"https://" ++ T.unpack (bucket info) ++ ".s3.amazonaws.com/" "https://" ++ T.unpack (bucket info) ++ ".s3.amazonaws.com/"
genericPublicUrl :: URLString -> BucketObject -> URLString genericPublicUrl :: URLString -> BucketObject -> URLString
genericPublicUrl baseurl p = baseurl Posix.</> p genericPublicUrl baseurl p =
baseurl Posix.</> escapeURIString skipescape p
where
-- Don't need to escape '/' because the bucket object
-- is not necessarily a single url component.
-- But do want to escape eg '+' and ' '
skipescape '/' = True
skipescape c = isUnescapedInURIComponent c
genCredentials :: CredPair -> IO AWS.Credentials genCredentials :: CredPair -> IO AWS.Credentials
genCredentials (keyid, secret) = AWS.Credentials genCredentials (keyid, secret) = AWS.Credentials
@ -1126,8 +1134,8 @@ eitherS3VersionID info u c k fallback
| otherwise = return (Right (Left fallback)) | otherwise = return (Right (Left fallback))
s3VersionIDPublicUrl :: (S3Info -> BucketObject -> URLString) -> S3Info -> S3VersionID -> URLString s3VersionIDPublicUrl :: (S3Info -> BucketObject -> URLString) -> S3Info -> S3VersionID -> URLString
s3VersionIDPublicUrl mk info (S3VersionID obj vid) = mk info $ concat s3VersionIDPublicUrl mk info (S3VersionID obj vid) = concat
[ T.unpack obj [ mk info (T.unpack obj)
, "?versionId=" , "?versionId="
, T.unpack vid -- version ID is "url ready" so no escaping needed , T.unpack vid -- version ID is "url ready" so no escaping needed
] ]

View file

@ -17,3 +17,5 @@ frustratingly vague.
Experimentally, even url-encoding alphanumerics works in these urls.
So it would be ok to use a standard URI encoder.
> [[fixed|done]] --[[Joey]]