webapp: Progress bar fixes for many types of special remotes.
There was confusion in different parts of the progress bar code about whether an update contained the total number of bytes transferred, or the number of bytes transferred since the last update. One way this bug showed up was progress bars that seemed to stick at zero for a long time.

To fix it comprehensively, I added a new BytesProcessed data type that is explicitly a total quantity of bytes, not a delta.

Note that this doesn't necessarily fix every problem with progress bars. In particular, buffering can now cause progress bars to seem to run ahead of transfers, reaching 100% while data is still being uploaded.
parent 577128e9b8
commit cf07a2c412
24 changed files with 172 additions and 129 deletions
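The hunks below call a handful of helpers on the new type. A minimal sketch of what Utility.Metered presumably exports, reconstructed only from the call sites visible in this diff (the real module may differ in detail):

-- Sketch reconstructed from call sites; not the actual Utility.Metered source.
newtype BytesProcessed = BytesProcessed Integer
    deriving (Eq, Ord, Show)

-- A meter callback now always receives the total processed so far, never a delta.
type MeterUpdate = BytesProcessed -> IO ()

zeroBytesProcessed :: BytesProcessed
zeroBytesProcessed = BytesProcessed 0

addBytesProcessed :: Integral n => BytesProcessed -> n -> BytesProcessed
addBytesProcessed (BytesProcessed i) n = BytesProcessed (i + fromIntegral n)

toBytesProcessed :: Integral n => n -> BytesProcessed
toBytesProcessed = BytesProcessed . fromIntegral

fromBytesProcessed :: Integral n => BytesProcessed -> n
fromBytesProcessed (BytesProcessed i) = fromIntegral i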
@@ -29,6 +29,7 @@ import Data.ByteString.Lazy.UTF8 (fromString)
 import Data.Digest.Pure.SHA
 import Utility.UserInfo
 import Annex.Content
+import Utility.Metered

 type BupRepo = String

@@ -24,7 +24,7 @@ import Remote.Helper.Encryptable
 import Remote.Helper.Chunked
 import Crypto
 import Annex.Content
-import Meters
+import Utility.Metered

 remote :: RemoteType
 remote = RemoteType {

@@ -154,17 +154,20 @@ storeSplit' :: MeterUpdate -> Int64 -> [FilePath] -> [S.ByteString] -> [FilePath
 storeSplit' _ _ [] _ _ = error "ran out of dests"
 storeSplit' _ _ _ [] c = return $ reverse c
 storeSplit' meterupdate chunksize (d:dests) bs c = do
-	bs' <- E.bracket (openFile d WriteMode) hClose (feed chunksize bs)
+	bs' <- E.bracket (openFile d WriteMode) hClose $
+		feed zeroBytesProcessed chunksize bs
 	storeSplit' meterupdate chunksize dests bs' (d:c)
   where
-	feed _ [] _ = return []
-	feed sz (l:ls) h = do
-		let s = fromIntegral $ S.length l
+	feed _ _ [] _ = return []
+	feed bytes sz (l:ls) h = do
+		let len = S.length l
+		let s = fromIntegral len
 		if s <= sz || sz == chunksize
 			then do
 				S.hPut h l
-				meterupdate $ toInteger s
-				feed (sz - s) ls h
+				let bytes' = addBytesProcessed bytes len
+				meterupdate bytes'
+				feed bytes' (sz - s) ls h
 			else return (l:ls)

 storeHelper :: FilePath -> ChunkSize -> Key -> ([FilePath] -> IO [FilePath]) -> Annex Bool

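The storeSplit' hunk above is the migration pattern in miniature: instead of handing the meter each chunk's size, the loop threads a running total through the recursion and reports that. A self-contained sketch of the same pattern, using the helpers sketched earlier (writeChunks is a hypothetical name, not git-annex code):

import qualified Data.ByteString as S
import System.IO (Handle)

-- Hypothetical sketch: write chunks to a handle, reporting the running
-- total to the meter after each one, mirroring the feed loop above.
writeChunks :: MeterUpdate -> Handle -> [S.ByteString] -> IO ()
writeChunks meterupdate h = go zeroBytesProcessed
  where
    go _ [] = return ()
    go bytes (c:cs) = do
        S.hPut h c
        let bytes' = addBytesProcessed bytes (S.length c)
        meterupdate bytes' -- total so far, not a delta
        go bytes' cs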
@@ -40,6 +40,7 @@ import Init
 import Types.Key
 import qualified Fields
 import Logs.Location
+import Utility.Metered

 import Control.Concurrent
 import Control.Concurrent.MSampleVar

@@ -309,7 +310,7 @@ copyFromRemote r key file dest
 		: maybe [] (\f -> [(Fields.associatedFile, f)]) file
 	Just (cmd, params) <- git_annex_shell (repo r) "transferinfo"
 		[Param $ key2file key] fields
-	v <- liftIO $ newEmptySV
+	v <- liftIO $ (newEmptySV :: IO (MSampleVar Integer))
 	tid <- liftIO $ forkIO $ void $ tryIO $ do
 		bytes <- readSV v
 		p <- createProcess $

@@ -325,7 +326,7 @@ copyFromRemote r key file dest
 			send bytes
 			forever $
 				send =<< readSV v
-	let feeder = writeSV v
+	let feeder = writeSV v . fromBytesProcessed
 	bracketIO noop (const $ tryIO $ killThread tid) (a feeder)

 copyFromRemoteCheap :: Remote -> Key -> FilePath -> Annex Bool

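These two copyFromRemote hunks keep the transferinfo plumbing in plain Integers: the feeder converts each new total back with fromBytesProcessed, which is presumably why newEmptySV now needs its type annotation, as the element type is no longer pinned down by the writer. A stripped-down sketch of the producer/consumer shape (meterOverPipe is a hypothetical name; MSampleVar, from the SafeSemaphore package, keeps only the newest value written):

import Control.Concurrent (forkIO)
import Control.Concurrent.MSampleVar (MSampleVar, newEmptySV, readSV, writeSV)
import Control.Monad (forever)

-- Hypothetical sketch: the returned MeterUpdate overwrites the sample
-- variable with each new total, and a background thread forwards the
-- latest sample to 'send'. Real code kills the thread when the
-- transfer ends, as the bracketIO in the hunk above does.
meterOverPipe :: (Integer -> IO ()) -> IO MeterUpdate
meterOverPipe send = do
    v <- (newEmptySV :: IO (MSampleVar Integer))
    _ <- forkIO $ forever $ send =<< readSV v
    return $ writeSV v . fromBytesProcessed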
@@ -391,13 +392,13 @@ rsyncOrCopyFile rsyncparams src dest p =
 	dorsync = rsyncHelper (Just p) $
 		rsyncparams ++ [Param src, Param dest]
 	docopy = liftIO $ bracket
-		(forkIO $ watchfilesize 0)
+		(forkIO $ watchfilesize zeroBytesProcessed)
 		(void . tryIO . killThread)
 		(const $ copyFileExternal src dest)
 	watchfilesize oldsz = do
 		threadDelay 500000 -- 0.5 seconds
 		v <- catchMaybeIO $
-			fromIntegral . fileSize
+			toBytesProcessed . fileSize
 			<$> getFileStatus dest
 		case v of
 			Just sz

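The rsyncOrCopyFile fallback has no native progress stream, so it polls the destination file's size; a size on disk is already a total, so it maps straight onto the new type via toBytesProcessed. A condensed sketch of that polling loop (simplified: the real code wraps the stat in catchMaybeIO, and the loop is killed by its caller rather than ever returning):

import Control.Concurrent (threadDelay)
import Control.Monad (when)
import System.Posix.Files (fileSize, getFileStatus)

-- Condensed sketch: report the file's current size as the running
-- total every half second, firing the meter only when it has changed.
watchFileSize :: MeterUpdate -> FilePath -> IO ()
watchFileSize meterupdate f = go zeroBytesProcessed
  where
    go oldsz = do
        threadDelay 500000 -- 0.5 seconds
        sz <- toBytesProcessed . fileSize <$> getFileStatus f
        when (sz /= oldsz) $ meterupdate sz
        go sz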
@@ -22,7 +22,7 @@ import Remote.Helper.Encryptable
 import qualified Remote.Helper.AWS as AWS
 import Crypto
 import Creds
-import Meters
+import Utility.Metered
 import qualified Annex
 import Annex.Content

@@ -10,7 +10,7 @@ module Remote.Helper.Chunked where
 import Common.Annex
 import Utility.DataUnits
 import Types.Remote
-import Meters
+import Utility.Metered

 import qualified Data.Map as M
 import qualified Data.ByteString.Lazy as L

@@ -15,6 +15,7 @@ import Crypto
 import qualified Annex
 import Config.Cost
 import Utility.Base64
+import Utility.Metered

 {- Encryption setup for a remote. The user must specify whether to use
  - an encryption key, or not encrypt. An encrypted cipher is created, or is

@@ -21,6 +21,7 @@ import Annex.Content
 import Remote.Helper.Special
 import Remote.Helper.Encryptable
 import Crypto
+import Utility.Metered

 remote :: RemoteType
 remote = RemoteType {

@@ -22,6 +22,7 @@ import Remote.Helper.Encryptable
 import Crypto
 import Utility.Rsync
 import Utility.CopyFile
+import Utility.Metered
 import Annex.Perms

 type RsyncUrl = String

@@ -27,7 +27,7 @@ import Remote.Helper.Encryptable
 import qualified Remote.Helper.AWS as AWS
 import Crypto
 import Creds
-import Meters
+import Utility.Metered
 import Annex.Content

 remote :: RemoteType

@@ -17,6 +17,7 @@ import Config.Cost
 import Logs.Web
 import qualified Utility.Url as Url
 import Types.Key
+import Utility.Metered

 import qualified Data.Map as M

@@ -30,7 +30,7 @@ import Remote.Helper.Encryptable
 import Remote.Helper.Chunked
 import Crypto
 import Creds
-import Meters
+import Utility.Metered
 import Annex.Content

 type DavUrl = String