S3: upload progress display

Joey Hess 2012-11-18 22:20:43 -04:00
parent e2b7fc1ebd
commit b0e08ae457
2 changed files with 30 additions and 17 deletions

Messages.hs

@@ -11,6 +11,7 @@ module Messages (
 	showAction,
 	showProgress,
 	metered,
+	meteredBytes,
 	showSideAction,
 	doSideAction,
 	doQuietSideAction,
@@ -63,9 +64,17 @@ showProgress = handle q $
 {- Shows a progress meter while performing a transfer of a key.
  - The action is passed a callback to use to update the meter. -}
 metered :: (Maybe MeterUpdate) -> Key -> (MeterUpdate -> Annex a) -> Annex a
-metered combinemeterupdate key a = withOutputType $ go (keySize key)
+metered combinemeterupdate key a = go (keySize key)
   where
-	go (Just size) NormalOutput = do
+	go (Just size) = meteredBytes combinemeterupdate size a
+	go _ = a (const noop)
+
+{- Shows a progress meter while performing an action on a given number
+ - of bytes. -}
+meteredBytes :: (Maybe MeterUpdate) -> Integer -> (MeterUpdate -> Annex a) -> Annex a
+meteredBytes combinemeterupdate size a = withOutputType go
+  where
+	go NormalOutput = do
 		progress <- liftIO $ newProgress "" size
 		meter <- liftIO $ newMeter progress "B" 25 (renderNums binaryOpts 1)
 		showOutput
@@ -76,7 +85,7 @@ metered combinemeterupdate key a = withOutputType $ go (keySize key)
 			maybe noop (\m -> m n) combinemeterupdate
 		liftIO $ clearMeter stdout meter
 		return r
-	go _ _ = a (const noop)
+	go _ = a (const noop)
 
 showSideAction :: String -> Annex ()
 showSideAction m = Annex.getState Annex.output >>= go
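
The Messages change above factors the byte-counted part of metered out into meteredBytes, which runs an action with a MeterUpdate callback and only draws a meter for NormalOutput. Below is a minimal, self-contained sketch of the callback contract such an action is expected to honour; the MeterUpdate alias and the meteredCopy helper are illustrative assumptions, not code from this commit (only the meteredBytes signature comes from the diff).

import qualified Data.ByteString as S
import System.Environment (getArgs)
import System.IO

-- Assumed shape of the callback: the cumulative number of bytes handled.
type MeterUpdate = Integer -> IO ()

-- Copy src to dest in 64 KiB chunks, reporting the running byte total to
-- the callback after each chunk, the way an action passed to meteredBytes
-- is expected to drive the MeterUpdate it is given.
meteredCopy :: MeterUpdate -> FilePath -> FilePath -> IO ()
meteredCopy update src dest =
	withFile src ReadMode $ \hin ->
		withFile dest WriteMode $ \hout -> go hin hout 0
  where
	go hin hout n = do
		chunk <- S.hGet hin 65536
		if S.null chunk
			then return ()
			else do
				S.hPut hout chunk
				let n' = n + fromIntegral (S.length chunk)
				update n'
				go hin hout n'

main :: IO ()
main = do
	[src, dest] <- getArgs
	meteredCopy (\n -> putStrLn (show n ++ " bytes")) src dest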

Remote/S3.hs

@@ -24,6 +24,7 @@ import Remote.Helper.Special
 import Remote.Helper.Encryptable
 import Crypto
 import Creds
+import Meters
 import Annex.Content
 
 remote :: RemoteType
@@ -111,38 +112,41 @@ s3Setup u c = handlehost $ M.lookup "host" c
 			M.delete "bucket" defaults
 
 store :: Remote -> Key -> AssociatedFile -> MeterUpdate -> Annex Bool
-store r k _f _p = s3Action r False $ \(conn, bucket) -> do
+store r k _f p = s3Action r False $ \(conn, bucket) -> do
 	src <- inRepo $ gitAnnexLocation k
-	res <- liftIO $ storeHelper (conn, bucket) r k src
+	res <- storeHelper (conn, bucket) r k p src
 	s3Bool res
 
 storeEncrypted :: Remote -> (Cipher, Key) -> Key -> MeterUpdate -> Annex Bool
-storeEncrypted r (cipher, enck) k _p = s3Action r False $ \(conn, bucket) ->
+storeEncrypted r (cipher, enck) k p = s3Action r False $ \(conn, bucket) ->
 	-- To get file size of the encrypted content, have to use a temp file.
 	-- (An alternative would be chunking to a constant size.)
 	withTmp enck $ \tmp -> do
 		f <- inRepo $ gitAnnexLocation k
 		liftIO $ encrypt cipher (feedFile f) $
 			readBytes $ L.writeFile tmp
-		res <- liftIO $ storeHelper (conn, bucket) r enck tmp
+		res <- storeHelper (conn, bucket) r enck p tmp
 		s3Bool res
 
-storeHelper :: (AWSConnection, String) -> Remote -> Key -> FilePath -> IO (AWSResult ())
-storeHelper (conn, bucket) r k file = do
-	content <- liftIO $ L.readFile file
-	-- size is provided to S3 so the whole content does not need to be
-	-- buffered to calculate it
+storeHelper :: (AWSConnection, String) -> Remote -> Key -> MeterUpdate -> FilePath -> Annex (AWSResult ())
+storeHelper (conn, bucket) r k p file = do
 	size <- maybe getsize (return . fromIntegral) $ keySize k
-	let object = setStorageClass storageclass $
-		S3Object bucket (bucketFile r k) ""
-		(("Content-Length", show size) : xheaders) content
-	sendObject conn object
+	meteredBytes (Just p) size $ \meterupdate ->
+		liftIO $ withMeteredFile file meterupdate $ \content -> do
+			-- size is provided to S3 so the whole content
+			-- does not need to be buffered to calculate it
+			let object = setStorageClass storageclass $ S3Object
+				bucket (bucketFile r k) ""
+				(("Content-Length", show size) : xheaders)
+				content
+			sendObject conn object
   where
 	storageclass =
 		case fromJust $ M.lookup "storageclass" $ fromJust $ config r of
 			"REDUCED_REDUNDANCY" -> REDUCED_REDUNDANCY
 			_ -> STANDARD
-	getsize = fileSize <$> (liftIO $ getFileStatus file)
+	getsize = liftIO $ fromIntegral . fileSize <$> getFileStatus file
 	xheaders = filter isxheader $ M.assocs $ fromJust $ config r
 	isxheader (h, _) = "x-amz-" `isPrefixOf` h
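
In the new storeHelper, the upload is wrapped in meteredBytes, and withMeteredFile hands sendObject the file content while feeding byte counts to the meter, so the Content-Length can come from the key size without buffering the whole file just to measure it. The sketch below is a self-contained illustration of that lazy-read idea; withMeteredFile's real definition lives in git-annex's Meters module, so the chunk size and the unsafeInterleaveIO approach here are assumptions for illustration, not the actual implementation.

import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import System.Environment (getArgs)
import System.IO
import System.IO.Unsafe (unsafeInterleaveIO)

-- Assumed callback shape: cumulative bytes handed to the consumer so far.
type MeterUpdate = Integer -> IO ()

-- Give the action a lazy ByteString whose chunks are read on demand; each
-- chunk the consumer forces bumps the meter, so an uploader (like sendObject
-- in the S3 remote) drives the progress display as it streams the data.
-- The action must consume the content before returning, since the handle
-- is closed when withFile exits.
withMeteredFile :: FilePath -> MeterUpdate -> (L.ByteString -> IO a) -> IO a
withMeteredFile file update a =
	withFile file ReadMode $ \h ->
		a . L.fromChunks =<< go h 0
  where
	go h n = unsafeInterleaveIO $ do
		chunk <- S.hGet h 65536
		if S.null chunk
			then return []
			else do
				let n' = n + fromIntegral (S.length chunk)
				update n'
				rest <- go h n'
				return (chunk : rest)

main :: IO ()
main = do
	[f] <- getArgs
	withMeteredFile f print (print . L.length)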