addurl --preserve-filename for other remotes

Finishing work begun in 6952060665.

Also, truncate filenames provided by other remotes if they're too long when
--preserve-filename is not used. That seems to have been omitted before by
accident.
parent 5f5170b22b
commit 39d7e6dd2a

3 changed files with 27 additions and 15 deletions
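
The change centers on the new sanitizeOrPreserveFilePath helper added in the hunks
below: with --preserve-filename, the filename supplied by the remote is kept as-is
after a security check; without it, the name is sanitized and now also truncated to
the filesystem's length limit. As a rough, stand-alone picture of that decision (not
git-annex code; chooseFileName, looksSafe and pathMax are made-up stand-ins for
checkPreserveFileNameSecurity, sanitizeFilePath and fileNameLengthLimit):

-- Hypothetical sketch of the preserve-vs-sanitize decision described above.
import Data.Char (isControl)

chooseFileName :: Bool -> FilePath -> Either String FilePath
chooseFileName preserve f
    | preserve && not (null f) =
        if looksSafe f
            then Right f                          -- keep the remote's name untouched
            else Left ("unsafe filename: " ++ f)  -- stand-in for a failed security check
    | otherwise =
        Right (take pathMax (map sanitizeChar f)) -- sanitize, then truncate if too long
  where
    looksSafe name = '/' `notElem` name && not (any isControl name)
    sanitizeChar c
        | c == '/' || isControl c = '_'
        | otherwise = c
    pathMax = 255  -- stand-in for fileNameLengthLimit "."

main :: IO ()
main = do
    print (chooseFileName True "ok name.mp3")
    print (chooseFileName False (replicate 300 'x' ++ ".mp3"))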
@@ -136,14 +136,16 @@ checkUrl addunlockedmatcher r o u = do
     go _ (Left e) = void $ commandAction $ startingAddUrl u o $ do
         warning (show e)
         next $ return False
-    go deffile (Right (UrlContents sz f)) = do
-        let f = adjustFile o (fromMaybe (maybe deffile sanitizeFilePath mf) (fileOption (downloadOptions o)))
-        void $ commandAction $ startRemote addunlockedmatcher r o f u sz
+    go deffile (Right (UrlContents sz mf)) = do
+        f <- maybe (pure deffile) (sanitizeOrPreserveFilePath o) mf
+        let f' = adjustFile o (fromMaybe f (fileOption (downloadOptions o)))
+        void $ commandAction $ startRemote addunlockedmatcher r o f' u sz
     go deffile (Right (UrlMulti l)) = case fileOption (downloadOptions o) of
         Nothing ->
             forM_ l $ \(u', sz, f) -> do
-                let f' = adjustFile o (deffile </> sanitizeFilePath f)
-                void $ commandAction $ startRemote addunlockedmatcher r o f' u' sz
+                f' <- sanitizeOrPreserveFilePath o f
+                let f'' = adjustFile o (deffile </> sanitizeFilePath f')
+                void $ commandAction $ startRemote addunlockedmatcher r o f'' u' sz
         Just f -> case l of
             [] -> noop
             ((u',sz,_):[]) -> do
@@ -215,20 +217,26 @@ startWeb addunlockedmatcher o urlstring = go $ fromMaybe bad $ parseURI urlstring
     file <- adjustFile o <$> case fileOption (downloadOptions o) of
         Just f -> pure f
         Nothing -> case Url.urlSuggestedFile urlinfo of
-            Just sf | not (null sf) -> if preserveFilenameOption (downloadOptions o)
-                then do
-                    checkPreserveFileNameSecurity sf
-                    return sf
-                else do
-                    let f = truncateFilePath pathmax $
-                        sanitizeFilePath sf
-                    ifM (liftIO $ doesFileExist f <||> doesDirectoryExist f)
+            Just sf -> do
+                f <- sanitizeOrPreserveFilePath o sf
+                if preserveFilenameOption (downloadOptions o)
+                    then pure f
+                    else ifM (liftIO $ doesFileExist f <||> doesDirectoryExist f)
                        ( pure $ url2file url (pathdepthOption o) pathmax
                        , pure f
                        )
            _ -> pure $ url2file url (pathdepthOption o) pathmax
    performWeb addunlockedmatcher o urlstring file urlinfo
 
+sanitizeOrPreserveFilePath :: AddUrlOptions -> FilePath -> Annex FilePath
+sanitizeOrPreserveFilePath o f
+    | preserveFilenameOption (downloadOptions o) && not (null f) = do
+        checkPreserveFileNameSecurity f
+        return f
+    | otherwise = do
+        pathmax <- liftIO $ fileNameLengthLimit "."
+        return $ truncateFilePath pathmax $ sanitizeFilePath f
+
 -- sanitizeFilePath avoids all these security problems
 -- (and probably others, but at least this catches the most egrarious ones).
 checkPreserveFileNameSecurity :: FilePath -> Annex ()
@@ -45,3 +45,5 @@ git-annex version: 7.20190708+git9-gfa3524b95-1~ndall+1
 
 [[!meta author=yoh]]
 [[!tag projects/dandi]]
+
+> [[done]] --[[Joey]]
@@ -3,10 +3,12 @@
 subject="""comment 5"""
 date="2020-05-11T17:20:07Z"
 content="""
-I agree that it may as well allow non-leading '-'.
+I agree that it may as well allow non-leading '-'. But, if you are relying
+on getting the unsanitized filename generally, you should use
+--preserve-filename
 
 Web browsers do do some santization, particulary of '/'.
-Chrome removes leading "." as well. Often files are downloaded to locations
+Chrome removes leading "." as well. Often files are downloaded
 without the user confirming it. I suspect there is enough insecurity
 in that area that someone could make a living injecting bitcoin miners into
 dotfiles.
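
The rule being discussed in that comment (a '-' is only special at the start of a
name, a leading '.' hides a file, and '/' is never safe) is easy to picture; the
sketch below is a hypothetical illustration of that policy, not git-annex's actual
sanitizeFilePath:

-- Hypothetical illustration of the policy discussed in comment 5: neutralize
-- '/' and control characters everywhere, and a '.' or '-' only when leading;
-- a non-leading '-' is left alone. Not git-annex's real sanitizeFilePath.
import Data.Char (isControl)

sanitizeSuggested :: FilePath -> FilePath
sanitizeSuggested f = case map replaceUnsafe f of
    c:rest | c `elem` ".-" -> '_' : rest   -- only a leading '.' or '-' is rewritten
    f' -> f'
  where
    replaceUnsafe c
        | c == '/' || isControl c = '_'    -- path separators and control chars never survive
        | otherwise = c

main :: IO ()
main = mapM_ (putStrLn . sanitizeSuggested) ["-rf", "a-b.txt", ".bashrc", "dir/name"]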