download urls via tmp file, and support resuming

This commit is contained in:
Joey Hess 2011-07-01 18:46:07 -04:00
parent 5d154b8436
commit ace9de37e8
3 changed files with 23 additions and 10 deletions

View file

@@ -16,9 +16,13 @@ import Command
 import qualified Backend
 import qualified Remote.Web
 import qualified Command.Add
+import qualified Annex
 import Messages
 import Content
 import PresenceLog
+import Types.Key
+import Locations
+import Utility
 
 command :: [Command]
 command = [repoCommand "addurl" paramPath seek "add urls to annex"]
@@ -38,16 +42,20 @@ start s = do
 perform :: String -> FilePath -> CommandPerform
 perform url file = do
-	[(_, backend)] <- Backend.chooseBackends [file]
+	g <- Annex.gitRepo
 	showNote $ "downloading " ++ url
-	ok <- Remote.Web.download file [url]
+	let dummykey = stubKey { keyName = url, keyBackendName = "URL" }
+	let tmp = gitAnnexTmpLocation g dummykey
+	liftIO $ createDirectoryIfMissing True (parentDir tmp)
+	ok <- Remote.Web.download [url] tmp
 	if ok
 		then do
+			[(_, backend)] <- Backend.chooseBackends [file]
-			stored <- Backend.storeFileKey file backend
+			stored <- Backend.storeFileKey tmp backend
 			case stored of
 				Nothing -> stop
 				Just (key, _) -> do
-					moveAnnex key file
+					moveAnnex key tmp
 					Remote.Web.setUrl key url InfoPresent
 					next $ Command.Add.cleanup file key
 		else stop

View file

@@ -82,7 +82,9 @@ setUrl key url status = do
 	logChange g key webUUID (if null us then InfoMissing else InfoPresent)
 
 downloadKey :: Key -> FilePath -> Annex Bool
-downloadKey key file = download file =<< getUrls key
+downloadKey key file = do
+	us <- getUrls key
+	download us file
 
 uploadKey :: Key -> Annex Bool
 uploadKey _ = do
@@ -116,9 +118,9 @@ urlexists url = do
 	res <- perform curl
 	return $ res == CurlOK
 
-download :: FilePath -> [URLString] -> Annex Bool
-download _ [] = return False
-download file (url:us) = do
+download :: [URLString] -> FilePath -> Annex Bool
+download [] _ = return False
+download (url:us) file = do
 	showProgress -- make way for curl progress bar
-	ok <- liftIO $ boolSystem "curl" [Params "-# -o", File file, File url]
-	if ok then return ok else download file us
+	ok <- liftIO $ boolSystem "curl" [Params "-C - -# -o", File file, File url]
+	if ok then return ok else download us file

View file

@@ -7,4 +7,7 @@
 Whoops! You'd only told me O(N) twice before..
 
 So this is not too high priority. I think I would like to get the per-remote storage sorted out anyway, since probably it will be the thing needed to convert the URL backend into a special remote, which would then allow ripping out the otherwise unused pluggable backend infrastructure.
+
+Update: Per-remote storage is now sorted out, so this could be implemented
+if it actually made sense to do so.
 """]]