{- git-annex hashing backends
 -
 - Copyright 2011-2015 Joey Hess <id@joeyh.name>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}

{-# LANGUAGE CPP #-}

module Backend.Hash (
	backends,
	testKeyBackend,
) where

import Annex.Common
import qualified Annex
import Types.Backend
import Types.KeySource
import Utility.Hash
import Utility.ExternalSHA
import qualified Build.SysConfig as SysConfig

import qualified Data.ByteString.Lazy as L
import Data.Char

data Hash
	= MD5Hash
	| SHA1Hash
	| SHA2Hash HashSize
	| SHA3Hash HashSize
	| SkeinHash HashSize
type HashSize = Int

{- Order is slightly significant; want SHA256 first, and more general
 - sizes earlier. -}
hashes :: [Hash]
hashes = concat
	[ map SHA2Hash [256, 512, 224, 384]
#ifdef WITH_CRYPTONITE
	, map SHA3Hash [256, 512, 224, 384]
#endif
	, map SkeinHash [256, 512]
	, [SHA1Hash]
	, [MD5Hash]
	]

{- The SHA256E backend is the default, so genBackendE comes first. -}
backends :: [Backend]
backends = concatMap (\h -> [genBackendE h, genBackend h]) hashes
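-- For example, with WITH_CRYPTONITE defined the list works out to the
-- backends named SHA256E, SHA256, SHA512E, SHA512, SHA224E, SHA224,
-- SHA384E, SHA384, SHA3_256E, SHA3_256, ..., SKEIN256E, SKEIN256,
-- SKEIN512E, SKEIN512, SHA1E, SHA1, MD5E, MD5, so SHA256E comes first.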

genBackend :: Hash -> Backend
genBackend hash = Backend
	{ name = hashName hash
	, getKey = keyValue hash
	, verifyKeyContent = Just $ checkKeyChecksum hash
	, canUpgradeKey = Just needsUpgrade
	, fastMigrate = Just trivialMigrate
	, isStableKey = const True
	}

genBackendE :: Hash -> Backend
genBackendE hash = (genBackend hash)
	{ name = hashNameE hash
	, getKey = keyValueE hash
	}

hashName :: Hash -> String
hashName MD5Hash = "MD5"
hashName SHA1Hash = "SHA1"
hashName (SHA2Hash size) = "SHA" ++ show size
hashName (SHA3Hash size) = "SHA3_" ++ show size
hashName (SkeinHash size) = "SKEIN" ++ show size

hashNameE :: Hash -> String
hashNameE hash = hashName hash ++ "E"
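-- For example, hashName (SHA2Hash 256) is "SHA256", hashNameE (SHA2Hash 256)
-- is "SHA256E", hashName (SHA3Hash 224) is "SHA3_224", and
-- hashNameE (SkeinHash 512) is "SKEIN512E".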

{- A key is a hash of its contents. -}
keyValue :: Hash -> KeySource -> Annex (Maybe Key)
keyValue hash source = do
	let file = contentLocation source
	filesize <- liftIO $ getFileSize file
	s <- hashFile hash file filesize
	return $ Just $ stubKey
		{ keyName = s
		, keyBackendName = hashName hash
		, keySize = Just filesize
		}

{- Extension preserving keys. -}
keyValueE :: Hash -> KeySource -> Annex (Maybe Key)
keyValueE hash source = keyValue hash source >>= maybe (return Nothing) addE
  where
	addE k = return $ Just $ k
		{ keyName = keyName k ++ selectExtension (keyFilename source)
		, keyBackendName = hashNameE hash
		}

selectExtension :: FilePath -> String
selectExtension f
	| null es = ""
	| otherwise = intercalate "." ("":es)
  where
	es = filter (not . null) $ reverse $
		take 2 $ map (filter validInExtension) $
		takeWhile shortenough $
		reverse $ splitc '.' $ takeExtensions f
	shortenough e = length e <= 4 -- long enough for "jpeg"
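
{- Worked examples (illustrative; the filenames are hypothetical):
 -   selectExtension "archive.tar.gz"   == ".tar.gz"
 -   selectExtension "photo.jpeg"       == ".jpeg"
 -   selectExtension "file.mp3.partial" == ""  (stops at "partial", > 4 chars)
 -   selectExtension "README"           == ""
 -}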

{- A key's checksum is checked during fsck. -}
checkKeyChecksum :: Hash -> Key -> FilePath -> Annex Bool
checkKeyChecksum hash key file = catchIOErrorType HardwareFault hwfault $ do
	fast <- Annex.getState Annex.fast
	mstat <- liftIO $ catchMaybeIO $ getFileStatus file
	case (mstat, fast) of
		(Just stat, False) -> do
			filesize <- liftIO $ getFileSize' file stat
			showAction "checksum"
			check <$> hashFile hash file filesize
		_ -> return True
  where
	expected = keyHash key
	check s
		| s == expected = True
		{- A bug caused checksums to be prefixed with \ in some
		 - cases; still accept these as legal now that the bug has been
		 - fixed. -}
		| '\\' : s == expected = True
		| otherwise = False

	hwfault e = do
		warning $ "hardware fault: " ++ show e
		return False

keyHash :: Key -> String
keyHash key = dropExtensions (keyName key)

validInExtension :: Char -> Bool
validInExtension c
	| isAlphaNum c = True
	| c == '.' = True
	| otherwise = False

{- Upgrade keys that have the \ prefix on their sha due to a bug, or
 - that contain non-alphanumeric characters in their extension. -}
needsUpgrade :: Key -> Bool
needsUpgrade key = "\\" `isPrefixOf` keyHash key ||
	any (not . validInExtension) (takeExtensions $ keyName key)
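-- For example (hypothetical key names), a key whose hash part starts with
-- "\" due to the old bug, or whose keyName ends in an extension like
-- ".mp%3" (containing a non-alphanumeric character), needs this upgrade.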

trivialMigrate :: Key -> Backend -> AssociatedFile -> Maybe Key
trivialMigrate oldkey newbackend afile
	{- Fast migration from hashE to hash backend. -}
	| keyBackendName oldkey == name newbackend ++ "E" = Just $ oldkey
		{ keyName = keyHash oldkey
		, keyBackendName = name newbackend
		}
	{- Fast migration from hash to hashE backend. -}
	| keyBackendName oldkey ++ "E" == name newbackend = case afile of
		Nothing -> Nothing
		Just file -> Just $ oldkey
			{ keyName = keyHash oldkey ++ selectExtension file
			, keyBackendName = name newbackend
			}
	| otherwise = Nothing
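-- Illustrative example (hypothetical key): migrating a SHA256E key whose
-- keyName is "1a2b3c.tar.gz" to the SHA256 backend yields keyName "1a2b3c";
-- migrating in the other direction re-adds an extension selected from the
-- associated file, and is only possible when that file is known.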

hashFile :: Hash -> FilePath -> Integer -> Annex String
hashFile hash file filesize = go hash
  where
	go MD5Hash = use md5Hasher
	go SHA1Hash = usehasher 1
	go (SHA2Hash hashsize) = usehasher hashsize
	go (SHA3Hash hashsize) = use (sha3Hasher hashsize)
	go (SkeinHash hashsize) = use (skeinHasher hashsize)

	use hasher = liftIO $ do
		h <- hasher <$> L.readFile file
		-- Force full evaluation so file is read and closed.
		return (length h `seq` h)

	usehasher hashsize = case shaHasher hashsize filesize of
		Left sha -> use sha
		Right (external, internal) -> do
			v <- liftIO $ externalSHA external hashsize file
			case v of
				Right r -> return r
				Left e -> do
					warning e
					-- fall back to internal since
					-- external command failed
					use internal

shaHasher :: HashSize -> Integer -> Either (L.ByteString -> String) (String, L.ByteString -> String)
shaHasher hashsize filesize
	| hashsize == 1 = use SysConfig.sha1 sha1
	| hashsize == 256 = use SysConfig.sha256 sha2_256
	| hashsize == 224 = use SysConfig.sha224 sha2_224
	| hashsize == 384 = use SysConfig.sha384 sha2_384
	| hashsize == 512 = use SysConfig.sha512 sha2_512
	| otherwise = error $ "unsupported SHA size " ++ show hashsize
  where
	use Nothing hasher = Left $ usehasher hasher
	use (Just c) hasher
		{- Use builtin, but slightly slower hashing for
		 - smallish files. Cryptohash benchmarks at 90 to 101%
		 - of the speed of the external hashers, depending on the
		 - hash and system. So there is no point forking an
		 - external process unless the file is large. (See the
		 - benchmark sketch following this function.) -}
		| filesize < 1048576 = Left $ usehasher hasher
		| otherwise = Right (c, usehasher hasher)
	usehasher hasher = show . hasher
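
{- The 1048576 byte (1 MB) threshold above came from benchmarking the
 - builtin SHA256 against the external sha256sum command on 1 MB and 2 MB
 - files; the builtin was within a few percent of the external command.
 - A sketch of that benchmark as a standalone program (it assumes
 - Criterion, a cryptohash-style hashlazy, and git-annex's Common module
 - for readProcess and separate):
 -
 -   import Crypto.Hash
 -   import qualified Data.ByteString.Lazy as L
 -   import Criterion.Main
 -   import Common
 -
 -   testfile :: FilePath
 -   testfile = "/run/shm/data" -- on ram disk
 -
 -   main :: IO ()
 -   main = defaultMain
 -       [ bgroup "sha256"
 -           [ bench "internal" $ whnfIO internal
 -           , bench "external" $ whnfIO external
 -           ]
 -       ]
 -
 -   sha256 :: L.ByteString -> Digest SHA256
 -   sha256 = hashlazy
 -
 -   internal :: IO String
 -   internal = show . sha256 <$> L.readFile testfile
 -
 -   external :: IO String
 -   external = do
 -       s <- readProcess "sha256sum" [testfile]
 -       return $ fst $ separate (== ' ') s
 -}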

sha3Hasher :: HashSize -> (L.ByteString -> String)
sha3Hasher hashsize
#ifdef WITH_CRYPTONITE
	| hashsize == 256 = show . sha3_256
	| hashsize == 224 = show . sha3_224
	| hashsize == 384 = show . sha3_384
	| hashsize == 512 = show . sha3_512
#endif
	| otherwise = error $ "unsupported SHA3 size " ++ show hashsize

skeinHasher :: HashSize -> (L.ByteString -> String)
skeinHasher hashsize
	| hashsize == 256 = show . skein256
	| hashsize == 512 = show . skein512
	| otherwise = error $ "unsupported SKEIN size " ++ show hashsize

md5Hasher :: L.ByteString -> String
md5Hasher = show . md5

{- A variant of the SHA256E backend, for testing that needs special keys
 - that cannot collide with legitimate keys in the repository.
 -
 - This is accomplished by appending a special extension to the key,
 - one that selectExtension would never select (due to being too
 - long).
 -}
testKeyBackend :: Backend
testKeyBackend =
|
2015-08-06 19:02:25 +00:00
|
|
|
let b = genBackendE (SHA2Hash 256)
|
2014-08-01 19:09:49 +00:00
|
|
|
in b { getKey = (fmap addE) <$$> getKey b }
|
|
|
|
where
|
|
|
|
addE k = k { keyName = keyName k ++ longext }
|
|
|
|
longext = ".this-is-a-test-key"
|