{- BitTorrent remote.
 -
 - Copyright 2014 Joey Hess <id@joeyh.name>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
|
|
|
|
|
2014-12-18 18:22:43 +00:00
|
|
|
{-# LANGUAGE CPP #-}
|
|
|
|
|
2014-12-17 03:22:46 +00:00
|
|
|
module Remote.BitTorrent (remote) where
|
|
|
|
|
2016-01-20 20:36:33 +00:00
|
|
|
import Annex.Common
|
2014-12-17 03:22:46 +00:00
|
|
|
import Types.Remote
|
|
|
|
import qualified Annex
|
|
|
|
import qualified Git
|
|
|
|
import qualified Git.Construct
|
|
|
|
import Config.Cost
|
|
|
|
import Logs.Web
|
|
|
|
import Types.UrlContents
|
|
|
|
import Types.CleanupActions
|
2015-04-03 20:48:30 +00:00
|
|
|
import Messages.Progress
|
2014-12-17 03:22:46 +00:00
|
|
|
import Utility.Metered
|
|
|
|
import Utility.Tmp
|
2016-12-24 18:46:31 +00:00
|
|
|
import Utility.FileSystemEncoding
|
2014-12-17 03:22:46 +00:00
|
|
|
import Backend.URL
|
|
|
|
import Annex.Perms
|
2014-12-17 17:57:52 +00:00
|
|
|
import Annex.UUID
|
2014-12-17 03:22:46 +00:00
|
|
|
import qualified Annex.Url as Url
|
|
|
|
|
|
|
|
import Network.URI
|
|
|
|
|
2014-12-18 18:22:43 +00:00
|
|
|
#ifdef WITH_TORRENTPARSER
|
|
|
|
import Data.Torrent
|
|
|
|
import qualified Data.ByteString.Lazy as B
|
|
|
|
#endif
|
|
|
|
|
2014-12-17 03:22:46 +00:00
|
|
|
-- | The bittorrent remote type. There is only one such remote and it
-- cannot be set up with initremote; it exists implicitly (see 'list').
remote :: RemoteType
remote = RemoteType {
	typename = "bittorrent",
	enumerate = list,
	generate = gen,
	-- Use giveup, as the rest of this module does for user-facing
	-- failures, rather than a bare error.
	setup = giveup "not supported"
}
|
|
|
|
|
|
|
|
-- There is only one bittorrent remote, and it always exists.
list :: Bool -> Annex [Git.Repo]
list _autoinit = (: []) <$> liftIO mkrepo
  where
	-- The remote has no underlying git repository of its own.
	mkrepo = Git.Construct.remoteNamed "bittorrent" (pure Git.Construct.fromUnknown)
|
|
|
|
|
|
|
|
-- | Generates the singleton bittorrent remote.
--
-- The remote is read-only: storeKey only emits a warning (see
-- 'uploadKey'), and removeKey only updates the url log (see 'dropKey').
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
gen r _ c gc =
	return $ Just Remote
		{ uuid = bitTorrentUUID
		, cost = expensiveRemoteCost
		, name = Git.repoDescribe r
		, storeKey = uploadKey
		, retrieveKeyFile = downloadKey
		, retrieveKeyFileCheap = downloadKeyCheap
		, removeKey = dropKey
		-- Content cannot be locked in place on bittorrent.
		, lockContent = Nothing
		-- checkKey always gives up (see below), and doing so is
		-- not cheap.
		, checkPresent = checkKey
		, checkPresentCheap = False
		-- Bittorrent is download-only, so exporting trees to it
		-- makes no sense.
		, storeExport = Nothing
		, retrieveExport = Nothing
		, removeExport = Nothing
		, checkPresentExport = Nothing
		, renameExport = Nothing
		, whereisKey = Nothing
		, remoteFsck = Nothing
		, repairRepo = Nothing
		, config = c
		, gitconfig = gc
		, localpath = Nothing
		, repo = r
		, readonly = True
		, availability = GloballyAvailable
		, remotetype = remote
		, mkUnavailable = return Nothing
		, getInfo = return []
		-- Claim urls that look like torrent files or magnet links.
		, claimUrl = Just (pure . isSupportedUrl)
		, checkUrl = Just checkTorrentUrl
		}
|
|
|
|
|
other 80% of avoding verification when hard linking to objects in shared repo
In c6632ee5c8e66c26ef18317f56ae02bae1e7e280, it actually only handled
uploading objects to a shared repository. To avoid verification when
downloading objects from a shared repository, was a lot harder.
On the plus side, if the process of downloading a file from a remote
is able to verify its content on the side, the remote can indicate this
now, and avoid the extra post-download verification.
As of yet, I don't have any remotes (except Git) using this ability.
Some more work would be needed to support it in special remotes.
It would make sense for tahoe to implicitly verify things downloaded from it;
as long as you trust your tahoe server (which typically runs locally),
there's cryptographic integrity. OTOH, despite bup being based on shas,
a bup repo under an attacker's control could have the git ref used for an
object changed, and so a bup repo shouldn't implicitly verify. Indeed,
tahoe seems unique in being trustworthy enough to implicitly verify.
2015-10-02 17:56:42 +00:00
|
|
|
-- | Downloads the content of a key from one of the bittorrent urls
-- recorded for it, trying each in turn until one succeeds.
--
-- The download itself provides no verification of the content, so
-- unVerified is used and git-annex will verify afterwards as usual.
downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex (Bool, Verification)
downloadKey key _file dest p = unVerified $
	get . map (torrentUrlNum . fst . getDownloader) =<< getBitTorrentUrls key
  where
	-- No bittorrent urls are recorded for this key.
	get [] = do
		warning "could not download torrent"
		return False
	get urls = do
		showOutput -- make way for download progress bar
		untilTrue urls $ \(u, filenum) -> do
			registerTorrentCleanup u
			checkDependencies
			-- First fetch the torrent file itself, then the
			-- wanted file's content from it.
			ifM (downloadTorrentFile u)
				( downloadTorrentContent key u dest filenum p
				, return False
				)
|
2014-12-17 03:22:46 +00:00
|
|
|
|
2015-04-14 20:35:10 +00:00
|
|
|
-- | A cheap retrieval (eg, by symlink or hard link) is never possible
-- from bittorrent.
downloadKeyCheap :: Key -> AssociatedFile -> FilePath -> Annex Bool
downloadKeyCheap _key _file _dest = return False
|
2014-12-17 03:22:46 +00:00
|
|
|
|
|
|
|
-- | Bittorrent is a read-only remote; storing content on it is not
-- possible, so warn and fail.
uploadKey :: Key -> AssociatedFile -> MeterUpdate -> Annex Bool
uploadKey _ _ _ = do
	warning "upload to bittorrent not supported"
	return False
|
|
|
|
|
|
|
|
-- | Nothing can be removed from a torrent; "dropping" only marks all
-- the recorded bittorrent urls as missing in the url log.
dropKey :: Key -> Annex Bool
dropKey k = do
	urls <- getBitTorrentUrls k
	mapM_ (setUrlMissing bitTorrentUUID k) urls
	return True
|
|
|
|
|
2014-12-17 18:54:54 +00:00
|
|
|
{- We punt and don't try to check if a torrent has enough seeders
 - with all the pieces etc. That would be quite hard.. and even if
 - implemented, it tells us nothing about the later state of the torrent.
 -}
checkKey :: Key -> Annex Bool
checkKey _ = giveup "cannot reliably check torrent status"
|
2014-12-17 03:22:46 +00:00
|
|
|
|
|
|
|
-- | All urls recorded for the key that this remote can handle:
-- those marked with OtherDownloader whose url is a torrent file
-- or magnet link.
getBitTorrentUrls :: Key -> Annex [URLString]
getBitTorrentUrls key = filter supported <$> getUrls key
  where
	supported u = case getDownloader u of
		(u', OtherDownloader) -> isSupportedUrl u'
		_ -> False
|
|
|
|
|
|
|
|
-- | A url is supported when it points at a torrent file, or is a
-- bittorrent magnet link.
isSupportedUrl :: URLString -> Bool
isSupportedUrl u = isTorrentUrl u || isTorrentMagnetUrl u
|
|
|
|
|
|
|
|
-- | True when the url parses and its path ends in ".torrent".
isTorrentUrl :: URLString -> Bool
isTorrentUrl u = case parseURI u of
	Just uri -> ".torrent" `isSuffixOf` uriPath uri
	Nothing -> False
|
|
|
|
|
|
|
|
-- | True for magnet links that carry a bittorrent info-hash
-- ("xt=urn:btih:") in their query string.
isTorrentMagnetUrl :: URLString -> Bool
isTorrentMagnetUrl u = "magnet:" `isPrefixOf` u
	&& maybe False isbt (parseURI u)
  where
	isbt uri = "xt=urn:btih:" `isInfixOf` uriQuery uri
|
|
|
|
|
|
|
|
-- | Downloads the torrent file for a url (registering cleanup of it)
-- and lists the contents it advertises.
checkTorrentUrl :: URLString -> Annex UrlContents
checkTorrentUrl u = do
	checkDependencies
	registerTorrentCleanup u
	ok <- downloadTorrentFile u
	if ok
		then torrentContents u
		else giveup "could not download torrent file"
|
|
|
|
|
|
|
|
{- To specify which file inside a multi-url torrent, the file number is
 - appended to the url. -}
torrentUrlWithNum :: URLString -> Int -> URLString
torrentUrlWithNum u n = concat [u, "#", show n]
|
|
|
|
|
|
|
|
-- | Splits a url produced by torrentUrlWithNum back into the base url
-- and the file number. A url without a "#<n>" suffix is file number 1.
torrentUrlNum :: URLString -> (URLString, Int)
torrentUrlNum u = case break (== '#') (reverse u) of
	-- Split at the last '#' in u; anything unparseable after it
	-- falls back to file number 1.
	(rn, _:rbase) -> (reverse rbase, fromMaybe 1 $ readish $ reverse rn)
	_ -> (u, 1)
|
2014-12-17 03:22:46 +00:00
|
|
|
|
|
|
|
{- A Key corresponding to the URL of a torrent file. -}
torrentUrlKey :: URLString -> Annex Key
torrentUrlKey u = return $ fromUrl baseurl Nothing
  where
	-- The file-number fragment is not part of the torrent's identity.
	baseurl = fst (torrentUrlNum u)
|
2014-12-17 03:22:46 +00:00
|
|
|
|
|
|
|
{- Temporary directory used to download a torrent. -}
tmpTorrentDir :: URLString -> Annex FilePath
tmpTorrentDir u = do
	subdir <- keyFile <$> torrentUrlKey u
	topdir <- fromRepo gitAnnexTmpMiscDir
	return (topdir </> subdir)
|
|
|
|
|
|
|
|
{- Temporary filename to use to store the torrent file. -}
tmpTorrentFile :: URLString -> Annex FilePath
tmpTorrentFile u = do
	k <- torrentUrlKey u
	fromRepo (gitAnnexTmpObjectLocation k)
|
|
|
|
|
|
|
|
{- A cleanup action is registered to delete the torrent file and its
 - associated temp directory when git-annex exits.
 -
 - This allows multiple actions that use the same torrent file and temp
 - directory to run in a single git-annex run.
 -}
registerTorrentCleanup :: URLString -> Annex ()
registerTorrentCleanup u = Annex.addCleanup (TorrentCleanup u) $ do
	tmpTorrentFile u >>= liftIO . nukeFile
	tmpdir <- tmpTorrentDir u
	liftIO $ whenM (doesDirectoryExist tmpdir) $
		removeDirectoryRecursive tmpdir
|
|
|
|
|
|
|
|
{- Downloads the torrent file. (Not its contents.) -}
downloadTorrentFile :: URLString -> Annex Bool
downloadTorrentFile u = do
	torrent <- tmpTorrentFile u
	-- Reuse a previously downloaded torrent file when present;
	-- registerTorrentCleanup deletes it when git-annex exits.
	ifM (liftIO $ doesFileExist torrent)
		( return True
		, do
			showAction "downloading torrent file"
			showOutput
			createAnnexDirectory (parentDir torrent)
			if isTorrentMagnetUrl u
				then do
					-- aria2c resolves the magnet link and writes
					-- the resulting .torrent into a scratch
					-- directory; downloadMagnetLink moves it into
					-- place at `torrent`.
					tmpdir <- tmpTorrentDir u
					let metadir = tmpdir </> "meta"
					createAnnexDirectory metadir
					ok <- downloadMagnetLink u metadir torrent
					liftIO $ removeDirectoryRecursive metadir
					return ok
				else do
					-- Download to a temp file and only rename into
					-- place on success, so a partial download is
					-- never mistaken for a complete torrent file.
					misctmp <- fromRepo gitAnnexTmpMiscDir
					withTmpFileIn misctmp "torrent" $ \f h -> do
						liftIO $ hClose h
						ok <- Url.withUrlOptions $ Url.download u f
						when ok $
							liftIO $ renameFile f torrent
						return ok
		)
|
|
|
|
|
|
|
|
-- | Uses aria2c to resolve a magnet link into a .torrent file,
-- which is moved to dest.
--
-- aria2c picks the name of the metadata file it saves in metadir,
-- so succeed only when exactly one .torrent file appears there.
downloadMagnetLink :: URLString -> FilePath -> FilePath -> Annex Bool
downloadMagnetLink u metadir dest = ifM download
	( liftIO $ do
		ts <- filter (".torrent" `isSuffixOf`)
			<$> dirContents metadir
		case ts of
			(t:[]) -> do
				renameFile t dest
				return True
			_ -> return False
	, return False
	)
  where
	download = runAria
		[ Param "--bt-metadata-only"
		, Param "--bt-save-metadata"
		, Param u
		-- Only the metadata is wanted; don't stay seeding.
		, Param "--seed-time=0"
		-- Suppress periodic summary output.
		, Param "--summary-interval=0"
		, Param "-d"
		, File metadir
		]
|
|
|
|
|
2014-12-17 17:40:04 +00:00
|
|
|
-- | Downloads the content of the filenum'th file in the torrent to
-- dest, using aria2c, updating the progress meter as it goes.
--
-- Assumes downloadTorrentFile has already fetched the torrent file.
downloadTorrentContent :: Key -> URLString -> FilePath -> Int -> MeterUpdate -> Annex Bool
downloadTorrentContent k u dest filenum p = do
	torrent <- tmpTorrentFile u
	tmpdir <- tmpTorrentDir u
	createAnnexDirectory tmpdir
	f <- wantedfile torrent
	showOutput
	-- aria2c exiting successfully is not enough; the wanted file
	-- must actually exist in the temp directory.
	ifM (download torrent tmpdir <&&> liftIO (doesFileExist (tmpdir </> f)))
		( do
			liftIO $ renameFile (tmpdir </> f) dest
			return True
		, return False
		)
  where
	download torrent tmpdir = ariaProgress (keySize k) p
		[ Param $ "--select-file=" ++ show filenum
		, File torrent
		, Param "-d"
		, File tmpdir
		-- Stop as soon as the download completes; don't seed.
		, Param "--seed-time=0"
		, Param "--summary-interval=0"
		, Param "--file-allocation=none"
		-- Needed so aria will resume partially downloaded files
		-- in multi-file torrents.
		, Param "--check-integrity=true"
		]
	{- aria2c will create part of the directory structure
	 - contained in the torrent. It may download parts of other files
	 - in addition to the one we asked for. So, we need to find
	 - out the filename we want based on the filenum.
	 -}
	wantedfile torrent = do
		fs <- liftIO $ map fst <$> torrentFileSizes torrent
		if length fs >= filenum
			then return (fs !! (filenum - 1))
			else giveup "Number of files in torrent seems to have changed."
|
2014-12-17 03:22:46 +00:00
|
|
|
|
|
|
|
-- | Gives up unless the external programs needed to download from
-- bittorrent are all in PATH.
checkDependencies :: Annex ()
checkDependencies = do
	missing <- liftIO $ filterM (not <$$> inPath) deps
	unless (null missing) $
		giveup $ "need to install additional software in order to download from bittorrent: " ++ unwords missing
  where
	deps =
		[ "aria2c"
-- btshowmetainfo is only needed as a fallback when the haskell torrent
-- parser is not built in. The rest of this module gates its use on
-- WITH_TORRENTPARSER (not TORRENT), so use the same flag here; otherwise
-- btshowmetainfo would be required even when it is never run.
#ifndef WITH_TORRENTPARSER
		, "btshowmetainfo"
#endif
		]
|
2014-12-17 03:22:46 +00:00
|
|
|
|
2014-12-17 17:40:04 +00:00
|
|
|
-- | Appends the user's annex.aria-torrent-options git config to the
-- given aria2c parameters.
ariaParams :: [CommandParam] -> Annex [CommandParam]
ariaParams ps = do
	gc <- Annex.getGitConfig
	let useropts = map Param (annexAriaTorrentOptions gc)
	return (ps ++ useropts)
|
|
|
|
|
|
|
|
-- | Runs aria2c with the given parameters (plus user-configured
-- options), displaying its progress output.
runAria :: [CommandParam] -> Annex Bool
runAria ps = ariaParams ps >>= progressCommand "aria2c"
|
2014-12-17 17:40:04 +00:00
|
|
|
|
|
|
|
-- Parse aria output to find "(n%)" and update the progress meter
-- with it.
ariaProgress :: Maybe Integer -> MeterUpdate -> [CommandParam] -> Annex Bool
-- Without a known total size, percentages are useless; run plainly.
ariaProgress Nothing _ ps = runAria ps
ariaProgress (Just sz) meter ps = do
	oh <- mkOutputHandler
	params <- ariaParams ps
	liftIO $ commandMeter (parseAriaProgress sz) oh meter "aria2c" params
|
|
|
|
|
|
|
|
-- | Builds a ProgressParser that scans aria2c's output (newest chunk
-- first, since it redraws its status line with carriage returns) for
-- a "(N%)" figure, converting the percentage to bytes processed.
parseAriaProgress :: Integer -> ProgressParser
parseAriaProgress totalsize = scan [] . reverse . splitc '\r'
  where
	scan leftover [] = (Nothing, leftover)
	scan leftover (chunk:rest) = case readish (percentstr chunk) of
		-- Unparseable chunks are kept as leftover input for the
		-- next call.
		Nothing -> scan (chunk ++ leftover) rest
		Just pct -> (Just (topos pct), leftover)

	-- Extracts N from the first "(N%)" in the string.
	percentstr = takeWhile (/= '%') . drop 1 . dropWhile (/= '(')

	topos pct = toBytesProcessed ((totalsize * pct) `div` 100)
|
2014-12-17 03:22:46 +00:00
|
|
|
|
2014-12-18 18:22:43 +00:00
|
|
|
{- Used only if the haskell torrent library is not available. -}
#ifndef WITH_TORRENTPARSER
-- | Runs btshowmetainfo on a torrent file and extracts the value(s)
-- of the named field from its output.
--
-- A field's value may be on the same line as the field name, or
-- spread over following indented lines.
btshowmetainfo :: FilePath -> String -> IO [String]
btshowmetainfo torrent field =
	findfield [] . lines <$> readProcess "btshowmetainfo" [torrent]
  where
	findfield c [] = reverse c
	findfield c (l:ls)
		-- Field name alone on its line: values follow, indented.
		| l == fieldkey = multiline c ls
		-- Field name and value on one line.
		| fieldkey `isPrefixOf` l =
			findfield ((drop (length fieldkey) l):c) ls
		| otherwise = findfield c ls

	-- Collect indented continuation lines.
	-- NOTE(review): this prefix looks like it should be three spaces
	-- to match the `drop 3` below -- confirm against btshowmetainfo
	-- output; whitespace may have been mangled in transit.
	multiline c (l:ls)
		| " " `isPrefixOf` l = multiline (drop 3 l:c) ls
		| otherwise = findfield c ls
	multiline c [] = findfield c []

	-- btshowmetainfo pads field names with dots to 14 characters,
	-- followed by ": ".
	fieldkey = field ++ take (14 - length field) (repeat '.') ++ ": "
#endif
|
2014-12-17 03:22:46 +00:00
|
|
|
|
|
|
|
{- Examines the torrent file and gets the list of files in it,
 - and their sizes.
 -}
torrentFileSizes :: FilePath -> IO [(FilePath, Integer)]
torrentFileSizes torrent = do
#ifdef WITH_TORRENTPARSER
	-- Parse the torrent with the haskell torrent library.
	let mkfile = joinPath . map (scrub . decodeBS)
	b <- B.readFile torrent
	return $ case readTorrent b of
		Left e -> giveup $ "failed to parse torrent: " ++ e
		Right t -> case tInfo t of
			SingleFile { tLength = l, tName = f } ->
				[ (mkfile [f], l) ]
			-- Files in a multi-file torrent are relative to its
			-- top-level directory.
			MultiFile { tFiles = fs, tName = dir } ->
				map (\tf -> (mkfile $ dir:filePath tf, fileLength tf)) fs
  where
#else
	-- Fall back to parsing the output of btshowmetainfo.
	files <- getfield "files"
	if null files
		then do
			-- Single-file torrent.
			fnl <- getfield "file name"
			szl <- map readish <$> getfield "file size"
			case (fnl, szl) of
				((fn:[]), (Just sz:[])) -> return [(scrub fn, sz)]
				_ -> parsefailed (show (fnl, szl))
		else do
			-- Multi-file torrent; files are listed relative to a
			-- top-level directory.
			v <- getfield "directory name"
			case v of
				(d:[]) -> return $ map (splitsize d) files
				_ -> parsefailed (show v)
  where
	getfield = btshowmetainfo torrent
	parsefailed s = giveup $ "failed to parse btshowmetainfo output for torrent file: " ++ show s

	-- btshowmetainfo outputs a list of "filename (size)"
	splitsize d l = (scrub (d </> fn), sz)
	  where
		sz = fromMaybe (parsefailed l) $ readish $
			reverse $ takeWhile (/= '(') $ dropWhile (== ')') $
			reverse l
		fn = reverse $ drop 2 $
			dropWhile (/= '(') $ dropWhile (== ')') $ reverse l
#endif
	-- a malicious torrent file might try to do directory traversal
	scrub f = if isAbsolute f || any (== "..") (splitPath f)
		then giveup "found unsafe filename in torrent!"
		else f
|
|
|
|
|
|
|
|
-- | Lists the contents of a torrent whose torrent file has already
-- been downloaded to the temp location for the url.
torrentContents :: URLString -> Annex UrlContents
torrentContents u = do
	torrent <- tmpTorrentFile u
	fsz <- liftIO $ torrentFileSizes torrent
	return (convert fsz)
  where
	-- A single-file torrent has one known size and filename.
	convert [(fn, sz)] = UrlContents (Just sz) (Just (mkSafeFilePath fn))
	-- Multi-file torrents yield one numbered sub-url per file.
	convert l = UrlMulti $ map mkmulti (zip l [1..])

	-- Drop the torrent's top-level directory from the filename.
	mkmulti ((fn, sz), n) =
		(torrentUrlWithNum u n, Just sz, mkSafeFilePath $ joinPath $ drop 1 $ splitPath fn)
|