{- Amazon S3 remotes.
-
- Copyright 2011 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Remote.S3 (remote) where
import Network.AWS.AWSConnection
import Network.AWS.S3Object
import Network.AWS.S3Bucket hiding (size)
import Network.AWS.AWSResult
import qualified Data.Text as T
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.Map as M
import Data.Char

import Common.Annex
import Types.Remote
import Types.Key
import qualified Git
import Config
import Config.Cost
import Remote.Helper.Special
import Remote.Helper.Encryptable
import qualified Remote.Helper.AWS as AWS
import Crypto
import Creds
import Utility.Metered
import Annex.Content
-- | The S3 special remote type: how existing S3 remotes are found in
-- git config, how a Remote is generated for one, and how a new one is
-- set up.
remote :: RemoteType
remote = RemoteType {
    typename = "S3",
    enumerate = findSpecialRemotes "s3",
    generate = gen,
    setup = s3Setup
}
-- | Generate a Remote for an S3 special remote.
--
-- The remote is wrapped by encryptableRemote, which transparently
-- dispatches to storeEncrypted/retrieveEncrypted when the remote's
-- config enables encryption, and to the plain store/retrieve otherwise.
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex Remote
gen r u c gc = new <$> remoteCost gc expensiveRemoteCost
  where
    new cst = encryptableRemote c
        (storeEncrypted this)
        (retrieveEncrypted this)
        this
      where
        -- this is knot-tied: the Remote's own operations close over it.
        this = Remote {
            uuid = u,
            cost = cst,
            name = Git.repoDescribe r,
            storeKey = store this,
            retrieveKeyFile = retrieve this,
            retrieveKeyFileCheap = retrieveCheap this,
            removeKey = remove this,
            hasKey = checkPresent this,
            hasKeyCheap = False,
            whereisKey = Nothing,
            config = c,
            repo = r,
            gitconfig = gc,
            localpath = Nothing,
            readonly = False,
            globallyAvailable = True,
            remotetype = remote
        }
-- | Set up a new S3 special remote.
--
-- Fills in defaults (datacenter, storage class, host, port, bucket
-- name), runs encryption setup, creates the bucket, and stores the
-- credentials. When host is *.archive.org, Internet Archive mode is
-- used instead, which has different constraints (no encryption, munged
-- key names, bucket auto-created on first upload).
s3Setup :: UUID -> RemoteConfig -> Annex RemoteConfig
s3Setup u c = handlehost $ M.lookup "host" c
  where
    remotename = fromJust (M.lookup "name" c)
    defbucket = remotename ++ "-" ++ fromUUID u
    defaults = M.fromList
        [ ("datacenter", T.unpack $ AWS.defaultRegion AWS.S3)
        , ("storageclass", "STANDARD")
        , ("host", defaultAmazonS3Host)
        , ("port", show defaultAmazonS3Port)
        , ("bucket", defbucket)
        ]

    handlehost Nothing = defaulthost
    handlehost (Just h)
        | ".archive.org" `isSuffixOf` map toLower h = archiveorg
        | otherwise = defaulthost

    use fullconfig = do
        gitConfigSpecialRemote u fullconfig "s3" "true"
        setRemoteCredPair fullconfig (AWS.creds u)

    defaulthost = do
        c' <- encryptionSetup c
        let fullconfig = c' `M.union` defaults
        genBucket fullconfig u
        use fullconfig

    archiveorg = do
        showNote "Internet Archive mode"
        maybe (error "specify bucket=") (const noop) $
            M.lookup "bucket" archiveconfig
        use archiveconfig
      where
        archiveconfig =
            -- hS3 does not pass through x-archive-* headers
            M.mapKeys (replace "x-archive-" "x-amz-") $
            -- encryption does not make sense here
            M.insert "encryption" "none" $
            M.union c $
            -- special constraints on key names
            M.insert "mungekeys" "ia" $
            -- bucket created only when files are uploaded
            M.insert "x-amz-auto-make-bucket" "1" $
            -- no default bucket name; should be human-readable
            M.delete "bucket" defaults
-- | Store a key's content in the S3 bucket.
--
-- sendAnnex is given (remove r k) as the rollback action, run if the
-- content cannot be sent.
store :: Remote -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
store r k _f p = s3Action r False $ \(conn, bucket) ->
    sendAnnex k (void $ remove r k) $ \src -> do
        res <- storeHelper (conn, bucket) r k p src
        s3Bool res
-- | Encrypt a key's content and store it in the S3 bucket under the
-- encrypted key's name.
storeEncrypted :: Remote -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
storeEncrypted r (cipher, enck) k p = s3Action r False $ \(conn, bucket) ->
    -- To get file size of the encrypted content, have to use a temp file.
    -- (An alternative would be chunking to a constant size.)
    withTmp enck $ \tmp -> sendAnnex k (void $ remove r enck) $ \src -> do
        liftIO $ encrypt (getGpgOpts r) cipher (feedFile src) $
            readBytes $ L.writeFile tmp
        res <- storeHelper (conn, bucket) r enck p tmp
        s3Bool res
-- | Upload a file to the bucket, with progress metering.
--
-- The object is sent with the configured storage class and any
-- x-amz-* headers found in the remote's config.
storeHelper :: (AWSConnection, String) -> Remote -> Key -> MeterUpdate -> FilePath -> Annex (AWSResult ())
storeHelper (conn, bucket) r k p file = do
    -- Prefer the size recorded in the key; stat the file otherwise.
    size <- maybe getsize (return . fromIntegral) $ keySize k
    meteredBytes (Just p) size $ \meterupdate ->
        liftIO $ withMeteredFile file meterupdate $ \content -> do
            -- size is provided to S3 so the whole content
            -- does not need to be buffered to calculate it
            let object = setStorageClass storageclass $ S3Object
                    bucket (bucketFile r k) ""
                    (("Content-Length", show size) : xheaders)
                    content
            sendObject conn object
  where
    storageclass =
        case fromJust $ M.lookup "storageclass" $ config r of
            "REDUCED_REDUNDANCY" -> REDUCED_REDUNDANCY
            _ -> STANDARD
    getsize = liftIO $ fromIntegral . fileSize <$> getFileStatus file
    xheaders = filter isxheader $ M.assocs $ config r
    isxheader (h, _) = "x-amz-" `isPrefixOf` h
-- | Retrieve a key's content from the bucket into the given file,
-- with progress metering.
retrieve :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
retrieve r k _f d = s3Action r False $ \(conn, bucket) ->
    metered Nothing k $ \meterupdate -> do
        res <- liftIO $ getObject conn $ bucketKey r bucket k
        case res of
            Right o -> do
                liftIO $ meteredWriteFile meterupdate d $
                    obj_data o
                return True
            Left e -> s3Warning e
-- | There is no cheap way to retrieve from S3; always fails.
retrieveCheap :: Remote -> Key -> FilePath -> Annex Bool
retrieveCheap _ _ _ = return False
-- | Retrieve an encrypted object from the bucket and decrypt it into
-- the given file, metering progress as the ciphertext is fed to gpg.
retrieveEncrypted :: Remote -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted r (cipher, enck) k d = s3Action r False $ \(conn, bucket) ->
    metered Nothing k $ \meterupdate -> do
        res <- liftIO $ getObject conn $ bucketKey r bucket enck
        case res of
            Right o -> liftIO $ decrypt cipher (\h -> meteredWrite meterupdate h $ obj_data o) $
                readBytes $ \content -> do
                    L.writeFile d content
                    return True
            Left e -> s3Warning e
-- | Remove a key's object from the bucket.
remove :: Remote -> Key -> Annex Bool
remove r k = s3Action r False $ \(conn, bucket) -> do
    res <- liftIO $ deleteObject conn $ bucketKey r bucket k
    s3Bool res
-- | Check if a key's object is present in the bucket.
--
-- Right True/False when the check succeeded; Left with an error
-- message when the check itself failed (eg, network problem).
checkPresent :: Remote -> Key -> Annex (Either String Bool)
checkPresent r k = s3Action r noconn $ \(conn, bucket) -> do
    showAction $ "checking " ++ name r
    res <- liftIO $ getObjectInfo conn $ bucketKey r bucket k
    case res of
        Right _ -> return $ Right True
        -- An AWSError here means the object was not found.
        Left (AWSError _ _) -> return $ Right False
        Left e -> return $ Left (s3Error e)
  where
    -- Only forced (and so only errors) if the Left is actually used.
    noconn = Left $ error "S3 not configured"
-- | Display an S3 request error as a warning, and return False.
s3Warning :: ReqError -> Annex Bool
s3Warning e = do
    warning $ prettyReqError e
    return False
-- | Abort with an S3 request error.
s3Error :: ReqError -> a
s3Error e = error $ prettyReqError e
-- | Convert an AWSResult to a success Bool, warning on failure.
s3Bool :: AWSResult () -> Annex Bool
s3Bool (Right _) = return True
s3Bool (Left e) = s3Warning e
-- | Run an action with the remote's S3 connection and bucket name.
--
-- Returns the noconn fallback value when the bucket is not configured
-- or credentials are not available.
s3Action :: Remote -> a -> ((AWSConnection, String) -> Annex a) -> Annex a
s3Action r noconn action = do
    let bucket = M.lookup "bucket" $ config r
    conn <- s3Connection (config r) (uuid r)
    case (bucket, conn) of
        (Just b, Just c) -> action (c, b)
        _ -> return noconn
-- | The filename used for a key within the bucket.
--
-- Applies any configured fileprefix, and Internet Archive key munging
-- when mungekeys=ia is set.
bucketFile :: Remote -> Key -> FilePath
bucketFile r = munge . key2file
  where
    munge s = case M.lookup "mungekeys" c of
        Just "ia" -> iaMunge $ fileprefix ++ s
        _ -> fileprefix ++ s
    fileprefix = M.findWithDefault "" "fileprefix" c
    c = config r
-- | An S3Object referring to a key in the bucket, with no content
-- attached (used for get/info/delete requests).
bucketKey :: Remote -> String -> Key -> S3Object
bucketKey r bucket k = S3Object bucket (bucketFile r k) "" [] L.empty
{- Internet Archive limits filenames to a subset of ascii,
 - with no whitespace. Other characters are xml entity
 - encoded. -}
iaMunge :: String -> String
iaMunge = (>>= munge)
  where
    munge c
        | isAsciiUpper c || isAsciiLower c || isNumber c = [c]
        -- these punctuation characters are passed through as-is
        | c `elem` "_-.\"" = [c]
        -- whitespace is dropped entirely
        | isSpace c = []
        -- everything else becomes an entity, eg '!' -> "&33;"
        | otherwise = "&" ++ show (ord c) ++ ";"
-- | Ensure the configured bucket exists, creating it in the
-- configured datacenter when it does not.
genBucket :: RemoteConfig -> UUID -> Annex ()
genBucket c u = do
    conn <- s3ConnectionRequired c u
    showAction "checking bucket"
    loc <- liftIO $ getBucketLocation conn bucket
    case loc of
        -- Bucket already exists.
        Right _ -> noop
        Left err@(NetworkError _) -> s3Error err
        -- An AWSError means the bucket does not exist yet.
        Left (AWSError _ _) -> do
            showAction $ "creating bucket in " ++ datacenter
            res <- liftIO $ createBucketIn conn bucket datacenter
            case res of
                Right _ -> noop
                Left err -> s3Error err
  where
    bucket = fromJust $ M.lookup "bucket" c
    datacenter = fromJust $ M.lookup "datacenter" c
-- | Like s3Connection, but aborts when no connection can be made.
s3ConnectionRequired :: RemoteConfig -> UUID -> Annex AWSConnection
s3ConnectionRequired c u =
    maybe (error "Cannot connect to S3") return =<< s3Connection c u
-- | Build an AWS connection from the stored credentials and the
-- remote's host/port config. Nothing when credentials are unavailable.
s3Connection :: RemoteConfig -> UUID -> Annex (Maybe AWSConnection)
s3Connection c u = go =<< getRemoteCredPairFor "S3" c (AWS.creds u)
  where
    go Nothing = return Nothing
    go (Just (ak, sk)) = return $ Just $ AWSConnection host port ak sk
    host = fromJust $ M.lookup "host" c
    port = let s = fromJust $ M.lookup "port" c in
        case reads s of
            [(p, _)] -> p
            _ -> error $ "bad S3 port value: " ++ s