download urls via tmp file, and support resuming

This commit is contained in:
Joey Hess 2011-07-01 18:46:07 -04:00
parent 5d154b8436
commit ace9de37e8
3 changed files with 23 additions and 10 deletions

View file

@@ -16,9 +16,13 @@ import Command
 import qualified Backend
 import qualified Remote.Web
 import qualified Command.Add
+import qualified Annex
 import Messages
 import Content
 import PresenceLog
+import Types.Key
+import Locations
+import Utility
 
 command :: [Command]
 command = [repoCommand "addurl" paramPath seek "add urls to annex"]
@@ -38,16 +42,20 @@ start s = do
 
 perform :: String -> FilePath -> CommandPerform
 perform url file = do
-	[(_, backend)] <- Backend.chooseBackends [file]
+	g <- Annex.gitRepo
 	showNote $ "downloading " ++ url
-	ok <- Remote.Web.download file [url]
+	let dummykey = stubKey { keyName = url, keyBackendName = "URL" }
+	let tmp = gitAnnexTmpLocation g dummykey
+	liftIO $ createDirectoryIfMissing True (parentDir tmp)
+	ok <- Remote.Web.download [url] tmp
 	if ok
 		then do
-			stored <- Backend.storeFileKey file backend
+			[(_, backend)] <- Backend.chooseBackends [file]
+			stored <- Backend.storeFileKey tmp backend
 			case stored of
 				Nothing -> stop
 				Just (key, _) -> do
-					moveAnnex key file
+					moveAnnex key tmp
 					Remote.Web.setUrl key url InfoPresent
 					next $ Command.Add.cleanup file key
 		else stop
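
A small standalone sketch (not git-annex's actual code) of the flow the hunk above sets up: download into a temporary path that is stable across runs, let curl resume any partial file already sitting there, and only move the result into place once the download finishes. The tmpFor helper, the ".tmp" directory, and the url in main are invented for illustration; in the commit itself the stable location comes from gitAnnexTmpLocation applied to a dummy key built from the url.

-- Standalone sketch; assumes only the process, directory, and filepath packages.
import System.Directory (createDirectoryIfMissing, renameFile)
import System.Exit (ExitCode(..))
import System.FilePath (takeDirectory, (</>))
import System.Process (rawSystem)

-- Hypothetical helper standing in for gitAnnexTmpLocation on a dummy
-- URL key: the same url always maps to the same tmp path.
tmpFor :: String -> FilePath
tmpFor url = ".tmp" </> map sanitize url
  where sanitize c = if c `elem` "/:?&" then '_' else c

downloadTo :: String -> FilePath -> IO Bool
downloadTo url dest = do
    let tmp = tmpFor url
    createDirectoryIfMissing True (takeDirectory tmp)
    -- "-C -" makes curl continue from whatever is already in tmp.
    code <- rawSystem "curl" ["-C", "-", "-#", "-o", tmp, url]
    if code == ExitSuccess
        then renameFile tmp dest >> return True
        else return False

main :: IO ()
main = do
    ok <- downloadTo "http://example.com/big.iso" "big.iso"
    putStrLn $ if ok then "downloaded" else "download failed"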

View file

@@ -82,7 +82,9 @@ setUrl key url status = do
 	logChange g key webUUID (if null us then InfoMissing else InfoPresent)
 
 downloadKey :: Key -> FilePath -> Annex Bool
-downloadKey key file = download file =<< getUrls key
+downloadKey key file = do
+	us <- getUrls key
+	download us file
 
 uploadKey :: Key -> Annex Bool
 uploadKey _ = do
@@ -116,9 +118,9 @@ urlexists url = do
 	res <- perform curl
 	return $ res == CurlOK
 
-download :: FilePath -> [URLString] -> Annex Bool
-download _ [] = return False
-download file (url:us) = do
+download :: [URLString] -> FilePath -> Annex Bool
+download [] _ = return False
+download (url:us) file = do
 	showProgress -- make way for curl progress bar
-	ok <- liftIO $ boolSystem "curl" [Params "-# -o", File file, File url]
-	if ok then return ok else download file us
+	ok <- liftIO $ boolSystem "curl" [Params "-C - -# -o", File file, File url]
+	if ok then return ok else download us file
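
The curl change above is what adds the resuming: the "-C -" option tells curl to work out the resume offset from the size of the existing output file and continue the transfer from there. Because AddUrl now always downloads a given url into the same temporary path, an interrupted addurl run can simply be repeated and the download carries on instead of starting over.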

View file

@@ -7,4 +7,7 @@
 Whoops! You'd only told me O(N) twice before..
 
 So this is not too high priority. I think I would like to get the per-remote storage sorted out anyway, since probably it will be the thing needed to convert the URL backend into a special remote, which would then allow ripping out the otherwise unused pluggable backend infrastructure.
+
+Update: Per-remote storage is now sorted out, so this could be implemented
+if it actually made sense to do so.
 """]]