git-annex/Command/AddUrl.hs

{- git-annex command
-
- Copyright 2011-2014 Joey Hess <id@joeyh.name>
-
- Licensed under the GNU GPL version 3 or higher.
-}

module Command.AddUrl where

import Network.URI
import Command
import Backend
import qualified Annex
import qualified Annex.Url as Url
import qualified Backend.URL
import qualified Remote
import qualified Types.Remote as Remote
import qualified Command.Add
import Annex.Content
import Annex.Ingest
import Annex.CheckIgnore
import Annex.UUID
import Logs.Web
import Types.KeySource
import Types.UrlContents
import Annex.FileMatcher
import Logs.Location
import Utility.Metered
import qualified Annex.Transfer as Transfer
import Annex.Quvi
import qualified Utility.Quvi as Quvi

cmd :: Command
cmd = notBareRepo $ withGlobalOptions [jobsOption, jsonOption, jsonProgressOption] $
command "addurl" SectionCommon "add urls to annex"
(paramRepeating paramUrl) (seek <$$> optParser)
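
{- Example invocations (illustrative only; the full set of options is
 - defined by optParser below):
 -
 -   git annex addurl http://example.com/file.tar
 -   git annex addurl --file=video.webm --relaxed http://example.com/video
 -   git annex addurl --batch --with-files < urls-and-files.txt
 -}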

data AddUrlOptions = AddUrlOptions
{ addUrls :: CmdParams
, fileOption :: Maybe FilePath
, pathdepthOption :: Maybe Int
, prefixOption :: Maybe String
, suffixOption :: Maybe String
, relaxedOption :: Bool
, rawOption :: Bool
, batchOption :: BatchMode
, batchFilesOption :: Bool
}

optParser :: CmdParamsDesc -> Parser AddUrlOptions
optParser desc = AddUrlOptions
<$> cmdParams desc
<*> optional (strOption
( long "file" <> metavar paramFile
<> help "specify what file the url is added to"
))
<*> optional (option auto
( long "pathdepth" <> metavar paramNumber
<> help "number of url path components to use in filename"
))
<*> optional (strOption
( long "prefix" <> metavar paramValue
<> help "add a prefix to the filename"
))
<*> optional (strOption
( long "suffix" <> metavar paramValue
<> help "add a suffix to the filename"
))
<*> parseRelaxedOption
<*> parseRawOption
<*> parseBatchOption
<*> switch
( long "with-files"
<> help "parse batch mode lines of the form \"$url $file\""
)

parseRelaxedOption :: Parser Bool
parseRelaxedOption = switch
( long "relaxed"
<> help "skip size check"
)

parseRawOption :: Parser Bool
parseRawOption = switch
( long "raw"
<> help "disable special handling for torrents, quvi, etc"
)
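
{- Seeks through the urls given on the command line, and in batch mode
 - reads further urls from stdin. Each url is handed to the remote that
 - claims it; urls claimed by the web special remote (or when --raw is
 - used) go to startWeb, others to checkUrl. -}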
seek :: AddUrlOptions -> CommandSeek
seek o = allowConcurrentOutput $ do
forM_ (addUrls o) (\u -> go (o, u))
case batchOption o of
Batch -> batchInput (parseBatchInput o) go
NoBatch -> noop
where
go (o', u) = do
r <- Remote.claimingUrl u
if Remote.uuid r == webUUID || rawOption o'
then void $ commandAction $ startWeb o' u
else checkUrl r o' u
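
{- In batch mode, a line of input is an url to add; with --with-files,
 - it is an url and a destination file separated by a space. -}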
parseBatchInput :: AddUrlOptions -> String -> Either String (AddUrlOptions, URLString)
parseBatchInput o s
| batchFilesOption o =
let (u, f) = separate (== ' ') s
in if null u || null f
then Left ("parsed empty url or filename in input: " ++ s)
else Right (o { fileOption = Just f }, u)
| otherwise = Right (o, s)
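
{- Checks an url claimed by a special remote, which may resolve to a
 - single file or to multiple files. -}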
checkUrl :: Remote -> AddUrlOptions -> URLString -> Annex ()
checkUrl r o u = do
pathmax <- liftIO $ fileNameLengthLimit "."
let deffile = fromMaybe (urlString2file u (pathdepthOption o) pathmax) (fileOption o)
go deffile =<< maybe
(error $ "unable to checkUrl of " ++ Remote.name r)
(tryNonAsync . flip id u)
(Remote.checkUrl r)
where
go _ (Left e) = void $ commandAction $ do
showStart "addurl" u
warning (show e)
next $ next $ return False
go deffile (Right (UrlContents sz mf)) = do
let f = adjustFile o (fromMaybe (maybe deffile fromSafeFilePath mf) (fileOption o))
void $ commandAction $
startRemote r (relaxedOption o) f u sz
go deffile (Right (UrlMulti l))
| isNothing (fileOption o) =
forM_ l $ \(u', sz, f) -> do
let f' = adjustFile o (deffile </> fromSafeFilePath f)
void $ commandAction $
startRemote r (relaxedOption o) f' u' sz
| otherwise = giveup $ unwords
[ "That url contains multiple files according to the"
, Remote.name r
, " remote; cannot add it to a single file."
]
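
{- Adds an url that will be downloaded from the special remote that
 - claimed it. -}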
startRemote :: Remote -> Bool -> FilePath -> URLString -> Maybe Integer -> CommandStart
startRemote r relaxed file uri sz = do
pathmax <- liftIO $ fileNameLengthLimit "."
let file' = joinPath $ map (truncateFilePath pathmax) $ splitDirectories file
showStart "addurl" file'
showNote $ "from " ++ Remote.name r
next $ performRemote r relaxed uri file' sz

performRemote :: Remote -> Bool -> URLString -> FilePath -> Maybe Integer -> CommandPerform
performRemote r relaxed uri file sz = ifAnnexed file adduri geturi
where
loguri = setDownloader uri OtherDownloader
adduri = addUrlChecked relaxed loguri (Remote.uuid r) checkexistssize
checkexistssize key = return $ case sz of
Nothing -> (True, True)
Just n -> (True, n == fromMaybe n (keySize key))
geturi = next $ isJust <$> downloadRemoteFile r relaxed uri file sz
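
{- Downloads an url's content from a special remote, using an url key.
 - With --fast or --relaxed, the content is not downloaded; the url is
 - only recorded. -}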
downloadRemoteFile :: Remote -> Bool -> URLString -> FilePath -> Maybe Integer -> Annex (Maybe Key)
downloadRemoteFile r relaxed uri file sz = checkCanAdd file $ do
let urlkey = Backend.URL.fromUrl uri sz
liftIO $ createDirectoryIfMissing True (parentDir file)
ifM (Annex.getState Annex.fast <||> pure relaxed)
( do
cleanup (Remote.uuid r) loguri file urlkey Nothing
return (Just urlkey)
, do
-- Set temporary url for the urlkey
-- so that the remote knows what url it
-- should use to download it.
setTempUrl urlkey loguri
let downloader = \dest p -> fst <$> Remote.retrieveKeyFile r urlkey (Just file) dest p
ret <- downloadWith downloader urlkey (Remote.uuid r) loguri file
removeTempUrl urlkey
return ret
)
where
loguri = setDownloader uri OtherDownloader
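
{- Adds an url that is accessed via the web, using quvi to handle
 - video pages it supports. -}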
startWeb :: AddUrlOptions -> String -> CommandStart
startWeb o s = go $ fromMaybe bad $ parseURI urlstring
where
(urlstring, downloader) = getDownloader s
bad = fromMaybe (giveup $ "bad url " ++ urlstring) $
Url.parseURIRelaxed $ urlstring
go url = case downloader of
QuviDownloader -> usequvi
_ -> ifM (quviSupported urlstring)
( usequvi
, regulardownload url
)
regulardownload url = do
pathmax <- liftIO $ fileNameLengthLimit "."
urlinfo <- if relaxedOption o
then pure Url.assumeUrlExists
else Url.withUrlOptions (Url.getUrlInfo urlstring)
file <- adjustFile o <$> case fileOption o of
Just f -> pure f
Nothing -> case Url.urlSuggestedFile urlinfo of
Nothing -> pure $ url2file url (pathdepthOption o) pathmax
Just sf -> do
let f = truncateFilePath pathmax $
sanitizeFilePath sf
ifM (liftIO $ doesFileExist f <||> doesDirectoryExist f)
( pure $ url2file url (pathdepthOption o) pathmax
, pure f
)
showStart "addurl" file
next $ performWeb (relaxedOption o) urlstring file urlinfo
badquvi = giveup $ "quvi does not know how to download url " ++ urlstring
usequvi = do
page <- fromMaybe badquvi
<$> withQuviOptions Quvi.forceQuery [Quvi.quiet, Quvi.httponly] urlstring
let link = fromMaybe badquvi $ headMaybe $ Quvi.pageLinks page
pathmax <- liftIO $ fileNameLengthLimit "."
let file = adjustFile o $ flip fromMaybe (fileOption o) $
truncateFilePath pathmax $ sanitizeFilePath $
Quvi.pageTitle page ++ "." ++ fromMaybe "m" (Quvi.linkSuffix link)
showStart "addurl" file
next $ performQuvi (relaxedOption o) urlstring (Quvi.linkUrl link) file

performWeb :: Bool -> URLString -> FilePath -> Url.UrlInfo -> CommandPerform
performWeb relaxed url file urlinfo = ifAnnexed file addurl geturl
where
geturl = next $ isJust <$> addUrlFile relaxed url urlinfo file
addurl = addUrlChecked relaxed url webUUID $ \k -> return $
(Url.urlExists urlinfo, Url.urlSize urlinfo == keySize k)

performQuvi :: Bool -> URLString -> URLString -> FilePath -> CommandPerform
performQuvi relaxed pageurl videourl file = ifAnnexed file addurl geturl
where
quviurl = setDownloader pageurl QuviDownloader
addurl key = next $ do
cleanup webUUID quviurl file key Nothing
return True
geturl = next $ isJust <$> addUrlFileQuvi relaxed quviurl videourl file
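
{- Adds a quvi video; the page url is recorded as the key's url, while
 - the video url is what gets downloaded. -}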
addUrlFileQuvi :: Bool -> URLString -> URLString -> FilePath -> Annex (Maybe Key)
addUrlFileQuvi relaxed quviurl videourl file = checkCanAdd file $ do
let key = Backend.URL.fromUrl quviurl Nothing
ifM (pure relaxed <||> Annex.getState Annex.fast)
( do
cleanup webUUID quviurl file key Nothing
return (Just key)
, do
{- Get the size, and use that to check
- disk space. However, the size info is not
- retained, because the size of a video stream
- might change and we want to be able to download
- it later. -}
urlinfo <- Url.withUrlOptions (Url.getUrlInfo videourl)
let sizedkey = addSizeUrlKey urlinfo key
checkDiskSpaceToGet sizedkey Nothing $ do
tmp <- fromRepo $ gitAnnexTmpObjectLocation key
showOutput
ok <- Transfer.notifyTransfer Transfer.Download (Just file) $
Transfer.download webUUID key (Just file) Transfer.forwardRetry $ \p -> do
liftIO $ createDirectoryIfMissing True (parentDir tmp)
downloadUrl key p [videourl] tmp
if ok
then do
cleanup webUUID quviurl file key (Just tmp)
return (Just key)
else return Nothing
)
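
{- Adds an url to an already annexed file, after checking (unless
 - relaxed) that the url exists and its size matches the key. -}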
addUrlChecked :: Bool -> URLString -> UUID -> (Key -> Annex (Bool, Bool)) -> Key -> CommandPerform
addUrlChecked relaxed url u checkexistssize key
| relaxed = do
setUrlPresent u key url
next $ return True
| otherwise = ifM ((elem url <$> getUrls key) <&&> (elem u <$> loggedLocations key))
( next $ return True -- nothing to do
, do
(exists, samesize) <- checkexistssize key
if exists && samesize
then do
setUrlPresent u key url
next $ return True
else do
warning $ "while adding a new url to an already annexed file, " ++ if exists
then "url does not have expected file size (use --relaxed to bypass this check) " ++ url
else "failed to verify url exists: " ++ url
stop
)
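
{- Adds an url, downloading its content unless --fast or --relaxed
 - is used. -}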
addUrlFile :: Bool -> URLString -> Url.UrlInfo -> FilePath -> Annex (Maybe Key)
addUrlFile relaxed url urlinfo file = checkCanAdd file $ do
liftIO $ createDirectoryIfMissing True (parentDir file)
ifM (Annex.getState Annex.fast <||> pure relaxed)
( nodownload url urlinfo file
, downloadWeb url urlinfo file
)
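
{- Downloads an url's content from the web to the file, using a dummy
 - url key for the transfer. -}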
downloadWeb :: URLString -> Url.UrlInfo -> FilePath -> Annex (Maybe Key)
downloadWeb url urlinfo file = do
let dummykey = addSizeUrlKey urlinfo $ Backend.URL.fromUrl url Nothing
let downloader f p = do
showOutput
downloadUrl dummykey p [url] f
showAction $ "downloading " ++ url ++ " "
downloadWith downloader dummykey webUUID url file

{- The Key should be a dummy key, based on the URL, which is used
- for this download, before we can examine the file and find its real key.
- For resuming downloads to work, the dummy key for a given url should be
- stable. -}
downloadWith :: (FilePath -> MeterUpdate -> Annex Bool) -> Key -> UUID -> URLString -> FilePath -> Annex (Maybe Key)
downloadWith downloader dummykey u url file =
checkDiskSpaceToGet dummykey Nothing $ do
tmp <- fromRepo $ gitAnnexTmpObjectLocation dummykey
ifM (runtransfer tmp)
( do
backend <- chooseBackend file
let source = KeySource
{ keyFilename = file
, contentLocation = tmp
, inodeCache = Nothing
}
k <- genKey source backend
case k of
Nothing -> return Nothing
Just (key, _) -> do
cleanup u url file key (Just tmp)
return (Just key)
, return Nothing
)
where
runtransfer tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
Transfer.download u dummykey (Just file) Transfer.forwardRetry $ \p -> do
liftIO $ createDirectoryIfMissing True (parentDir tmp)
downloader tmp p

{- Adds the url size to the Key. -}
addSizeUrlKey :: Url.UrlInfo -> Key -> Key
addSizeUrlKey urlinfo key = key { keySize = Url.urlSize urlinfo }
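
{- Registers the url for the key and adds the file. When the downloaded
 - content does not match annex.largefiles, the file is added to git
 - as a regular, non-annexed file instead. -}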
cleanup :: UUID -> URLString -> FilePath -> Key -> Maybe FilePath -> Annex ()
cleanup u url file key mtmp = case mtmp of
Nothing -> go
Just tmp -> do
largematcher <- largeFilesMatcher
ifM (checkFileMatcher largematcher tmp)
( go
, do
liftIO $ renameFile tmp file
void $ Command.Add.addSmall file
)
where
go = do
maybeShowJSON $ JSONChunk [("key", key2file key)]
when (isJust mtmp) $
logStatus key InfoPresent
setUrlPresent u key url
addAnnexedFile file key mtmp
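
{- Records an url without downloading its content, as long as the url
 - is verified to exist. -}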
nodownload :: URLString -> Url.UrlInfo -> FilePath -> Annex (Maybe Key)
nodownload url urlinfo file
| Url.urlExists urlinfo = do
let key = Backend.URL.fromUrl url (Url.urlSize urlinfo)
cleanup webUUID url file key Nothing
return (Just key)
| otherwise = do
warning $ "unable to access url: " ++ url
return Nothing
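
{- Converts an url into a filename, optionally using --pathdepth to
 - select a subset of the url's path components, and truncating to the
 - filesystem's filename length limit. -}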
url2file :: URI -> Maybe Int -> Int -> FilePath
url2file url pathdepth pathmax = case pathdepth of
Nothing -> truncateFilePath pathmax $ sanitizeFilePath fullurl
Just depth
| depth >= length urlbits -> frombits id
| depth > 0 -> frombits $ drop depth
| depth < 0 -> frombits $ reverse . take (negate depth) . reverse
| otherwise -> giveup "bad --pathdepth"
where
fullurl = concat
[ maybe "" uriRegName (uriAuthority url)
, uriPath url
, uriQuery url
]
frombits a = intercalate "/" $ a urlbits
urlbits = map (truncateFilePath pathmax . sanitizeFilePath) $
filter (not . null) $ split "/" fullurl

urlString2file :: URLString -> Maybe Int -> Int -> FilePath
urlString2file s pathdepth pathmax = case Url.parseURIRelaxed s of
Nothing -> giveup $ "bad uri " ++ s
Just u -> url2file u pathdepth pathmax

adjustFile :: AddUrlOptions -> FilePath -> FilePath
adjustFile o = addprefix . addsuffix
where
addprefix f = maybe f (++ f) (prefixOption o)
addsuffix f = maybe f (f ++) (suffixOption o)
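
{- Refuses to overwrite an existing, non-annexed file, and refuses to
 - add a file that is gitignored (unless --force is used). -}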
checkCanAdd :: FilePath -> Annex (Maybe a) -> Annex (Maybe a)
checkCanAdd file a = ifM (isJust <$> (liftIO $ catchMaybeIO $ getSymbolicLinkStatus file))
( do
warning $ file ++ " already exists and is not annexed; not overwriting"
return Nothing
, ifM ((not <$> Annex.getState Annex.force) <&&> checkIgnored file)
( do
warning $ "not adding " ++ file ++ " which is .gitignored (use --force to override)"
return Nothing
, a
)
)